4c99ddfe89cb70518e0141eaeee61a685209cbf1
[deliverable/binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2015 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "regcache.h"
31 #include "reggroups.h"
32 #include "doublest.h"
33 #include "value.h"
34 #include "arch-utils.h"
35 #include "osabi.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
39 #include "objfiles.h"
40 #include "dwarf2-frame.h"
41 #include "gdbtypes.h"
42 #include "prologue-value.h"
43 #include "remote.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
46 #include "observer.h"
47
48 #include "arm-tdep.h"
49 #include "gdb/sim-arm.h"
50
51 #include "elf-bfd.h"
52 #include "coff/internal.h"
53 #include "elf/arm.h"
54
55 #include "vec.h"
56
57 #include "record.h"
58 #include "record-full.h"
59
60 #include "features/arm-with-m.c"
61 #include "features/arm-with-m-fpa-layout.c"
62 #include "features/arm-with-m-vfp-d16.c"
63 #include "features/arm-with-iwmmxt.c"
64 #include "features/arm-with-vfpv2.c"
65 #include "features/arm-with-vfpv3.c"
66 #include "features/arm-with-neon.c"
67
68 static int arm_debug;
69
70 /* Macros for setting and testing a bit in a minimal symbol that marks
71 it as Thumb function. The MSB of the minimal symbol's "info" field
72 is used for this purpose.
73
74 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
75 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
76
77 #define MSYMBOL_SET_SPECIAL(msym) \
78 MSYMBOL_TARGET_FLAG_1 (msym) = 1
79
80 #define MSYMBOL_IS_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym)
82
83 /* Per-objfile data used for mapping symbols. */
84 static const struct objfile_data *arm_objfile_data_key;
85
/* A mapping symbol record, used to classify address ranges of a
   section as ARM code, Thumb code or data.  */
struct arm_mapping_symbol
{
  /* Section-relative offset at which this mapping symbol takes
     effect.  */
  bfd_vma value;
  /* The mapping symbol class; 't' marks Thumb code (consumed by
     arm_pc_is_thumb).  Presumably the other classes are 'a' (ARM)
     and 'd' (data) per the ARM ELF convention — not visible here.  */
  char type;
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

struct arm_per_objfile
{
  /* One vector of mapping symbols per BFD section, indexed by the
     section's index (see arm_find_mapping_symbol).  */
  VEC(arm_mapping_symbol_s) **section_maps;
};
98
99 /* The list of available "set arm ..." and "show arm ..." commands. */
100 static struct cmd_list_element *setarmcmdlist = NULL;
101 static struct cmd_list_element *showarmcmdlist = NULL;
102
103 /* The type of floating-point to use. Keep this in sync with enum
104 arm_float_model, and the help string in _initialize_arm_tdep. */
105 static const char *const fp_model_strings[] =
106 {
107 "auto",
108 "softfpa",
109 "fpa",
110 "softvfp",
111 "vfp",
112 NULL
113 };
114
115 /* A variable that can be configured by the user. */
116 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
117 static const char *current_fp_model = "auto";
118
119 /* The ABI to use. Keep this in sync with arm_abi_kind. */
120 static const char *const arm_abi_strings[] =
121 {
122 "auto",
123 "APCS",
124 "AAPCS",
125 NULL
126 };
127
128 /* A variable that can be configured by the user. */
129 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
130 static const char *arm_abi_string = "auto";
131
132 /* The execution mode to assume. */
133 static const char *const arm_mode_strings[] =
134 {
135 "auto",
136 "arm",
137 "thumb",
138 NULL
139 };
140
141 static const char *arm_fallback_mode_string = "auto";
142 static const char *arm_force_mode_string = "auto";
143
144 /* Internal override of the execution mode. -1 means no override,
145 0 means override to ARM mode, 1 means override to Thumb mode.
146 The effect is the same as if arm_force_mode has been set by the
147 user (except the internal override has precedence over a user's
148 arm_force_mode override). */
149 static int arm_override_mode = -1;
150
151 /* Number of different reg name sets (options). */
152 static int num_disassembly_options;
153
154 /* The standard register names, and all the valid aliases for them. Note
155 that `fp', `sp' and `pc' are not added in this alias list, because they
156 have been added as builtin user registers in
157 std-regs.c:_initialize_frame_reg. */
158 static const struct
159 {
160 const char *name;
161 int regnum;
162 } arm_register_aliases[] = {
163 /* Basic register numbers. */
164 { "r0", 0 },
165 { "r1", 1 },
166 { "r2", 2 },
167 { "r3", 3 },
168 { "r4", 4 },
169 { "r5", 5 },
170 { "r6", 6 },
171 { "r7", 7 },
172 { "r8", 8 },
173 { "r9", 9 },
174 { "r10", 10 },
175 { "r11", 11 },
176 { "r12", 12 },
177 { "r13", 13 },
178 { "r14", 14 },
179 { "r15", 15 },
180 /* Synonyms (argument and variable registers). */
181 { "a1", 0 },
182 { "a2", 1 },
183 { "a3", 2 },
184 { "a4", 3 },
185 { "v1", 4 },
186 { "v2", 5 },
187 { "v3", 6 },
188 { "v4", 7 },
189 { "v5", 8 },
190 { "v6", 9 },
191 { "v7", 10 },
192 { "v8", 11 },
193 /* Other platform-specific names for r9. */
194 { "sb", 9 },
195 { "tr", 9 },
196 /* Special names. */
197 { "ip", 12 },
198 { "lr", 14 },
199 /* Names used by GCC (not listed in the ARM EABI). */
200 { "sl", 10 },
201 /* A special name from the older ATPCS. */
202 { "wr", 7 },
203 };
204
205 static const char *const arm_register_names[] =
206 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
207 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
208 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
209 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
210 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
211 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
212 "fps", "cpsr" }; /* 24 25 */
213
214 /* Valid register name styles. */
215 static const char **valid_disassembly_styles;
216
217 /* Disassembly style to use. Default to "std" register names. */
218 static const char *disassembly_style;
219
220 /* This is used to keep the bfd arch_info in sync with the disassembly
221 style. */
222 static void set_disassembly_style_sfunc(char *, int,
223 struct cmd_list_element *);
224 static void set_disassembly_style (void);
225
226 static void convert_from_extended (const struct floatformat *, const void *,
227 void *, int);
228 static void convert_to_extended (const struct floatformat *, void *,
229 const void *, int);
230
231 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
232 struct regcache *regcache,
233 int regnum, gdb_byte *buf);
234 static void arm_neon_quad_write (struct gdbarch *gdbarch,
235 struct regcache *regcache,
236 int regnum, const gdb_byte *buf);
237
238 static int thumb_insn_size (unsigned short inst1);
239
/* Per-frame cache of information computed while analyzing a function
   prologue, shared by the prologue-based unwinders.  */
struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};
259
260 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
261 CORE_ADDR prologue_start,
262 CORE_ADDR prologue_end,
263 struct arm_prologue_cache *cache);
264
265 /* Architecture version for displaced stepping. This effects the behaviour of
266 certain instructions, and really should not be hard-wired. */
267
268 #define DISPLACED_STEPPING_ARCH_VERSION 5
269
270 /* Addresses for calling Thumb functions have the bit 0 set.
271 Here are some macros to test, set, or clear bit 0 of addresses. */
272 #define IS_THUMB_ADDR(addr) ((addr) & 1)
273 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
274 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
275
276 /* Set to true if the 32-bit mode is in use. */
277
278 int arm_apcs_32 = 1;
279
280 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
281
282 int
283 arm_psr_thumb_bit (struct gdbarch *gdbarch)
284 {
285 if (gdbarch_tdep (gdbarch)->is_m)
286 return XPSR_T;
287 else
288 return CPSR_T;
289 }
290
291 /* Determine if FRAME is executing in Thumb mode. */
292
293 int
294 arm_frame_is_thumb (struct frame_info *frame)
295 {
296 CORE_ADDR cpsr;
297 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
298
299 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
300 directly (from a signal frame or dummy frame) or by interpreting
301 the saved LR (from a prologue or DWARF frame). So consult it and
302 trust the unwinders. */
303 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
304
305 return (cpsr & t_bit) != 0;
306 }
307
308 /* Callback for VEC_lower_bound. */
309
310 static inline int
311 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
312 const struct arm_mapping_symbol *rhs)
313 {
314 return lhs->value < rhs->value;
315 }
316
/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      /* Mapping symbol values are section-relative, so rebase MEMADDR
	 before searching.  */
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
					    0 };
      unsigned int idx;

      data = (struct arm_per_objfile *) objfile_data (sec->objfile,
						      arm_objfile_data_key);
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	    {
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		    {
		      if (start)
			*start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;
		    }
		}

	      /* Fall back to the mapping symbol immediately before
		 MEMADDR, if any.  */
	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		  if (start)
		    *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;
		}
	    }
	}
    }

  /* No mapping symbol covers MEMADDR.  */
  return 0;
}
376
/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.

   The heuristics below are applied in strict priority order:
   displaced-stepping redirection, the Thumb address bit, the internal
   mode override, the user's force-mode setting, M-profile (always
   Thumb), ELF mapping symbols, the minimal-symbol "special" bit, the
   user's fallback-mode setting, and finally the live CPSR.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  struct displaced_step_closure* dsc
    = get_displaced_step_closure_by_addr(memaddr);

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* Respect internal mode override if active.  */
  if (arm_override_mode != -1)
    return arm_override_mode;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}
446
447 /* Remove useless bits from addresses in a running program. */
448 static CORE_ADDR
449 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
450 {
451 /* On M-profile devices, do not strip the low bit from EXC_RETURN
452 (the magic exception return address). */
453 if (gdbarch_tdep (gdbarch)->is_m
454 && (val & 0xfffffff0) == 0xfffffff0)
455 return val;
456
457 if (arm_apcs_32)
458 return UNMAKE_THUMB_ADDR (val);
459 else
460 return (val & 0x03fffffc);
461 }
462
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  /* Only consider a symbol whose entry point is exactly PC.  */
  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  Skip the linker-added "__" prefix so the
	 prefix checks below see the original symbol name.  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  return 0;
}
516
/* Support routines for instruction parsing.  */

/* A mask with bits 0..X (inclusive) set.  */
#define submask(x) ((1L << ((x) + 1)) - 1)
/* Bit ST of OBJ.  */
#define bit(obj,st) (((obj) >> (st)) & 1)
/* Bits ST..FN (inclusive) of OBJ, shifted down to bit 0.  */
#define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
/* Bits ST..FN of OBJ, sign-extended from bit FN.  */
#define sbits(obj,st,fn) \
  ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
/* Target of an ARM-state branch: ADDR plus the 8-byte pipeline offset
   plus the sign-extended 24-bit offset scaled by 4.  */
#define BranchDest(addr,instr) \
  ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))

/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)		\
   | (bits ((insn1), 10, 10) << 11)	\
   | (bits ((insn2), 12, 14) << 8)	\
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
540
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.
   IMM is the 12-bit modified immediate field i:imm3:imm8.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int rot = imm >> 7;
  unsigned int byte = imm & 0xff;

  if (rot >= 8)
    /* A rotated constant: '1':imm<6:0>, rotated right by ROT.  */
    return (0x80 | (imm & 0x7f)) << (32 - rot);

  /* One of the four replication patterns, selected by imm<9:8>.  */
  switch (rot / 2)
    {
    case 0:
      return byte;
    case 1:
      return byte | (byte << 16);
    case 2:
      return (byte << 8) | (byte << 24);
    default:
      return byte | (byte << 8) | (byte << 16) | (byte << 24);
    }
}
564
/* Return 1 if the 16-bit Thumb instruction INST might change
   control flow, 0 otherwise.  */

static int
thumb_instruction_changes_pc (unsigned short inst)
{
  return ((inst & 0xff00) == 0xbd00	/* pop {rlist, pc} */
	  || (inst & 0xf000) == 0xd000	/* conditional branch */
	  || (inst & 0xf800) == 0xe000	/* unconditional branch */
	  || (inst & 0xff00) == 0x4700	/* bx REG, blx REG */
	  || (inst & 0xff87) == 0x4687	/* mov pc, REG */
	  || (inst & 0xf500) == 0xb100);	/* CBNZ or CBZ.  */
}
591
/* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
   might change control flow, 0 otherwise.  */

static int
thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
{
  if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
    {
      /* Branches and miscellaneous control instructions.  */

      if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	{
	  /* B, BL, BLX.  */
	  return 1;
	}
      else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x8f00)
	{
	  /* SUBS PC, LR, #imm8.  The second halfword of this encoding
	     is 0x8Fxx; the old mask compared against 0x3f00, which can
	     never match here because the enclosing test already
	     requires bit 15 of INST2 to be set, so the instruction was
	     never recognized.  */
	  return 1;
	}
      else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	{
	  /* Conditional branch.  */
	  return 1;
	}

      return 0;
    }

  if ((inst1 & 0xfe50) == 0xe810)
    {
      /* Load multiple or RFE.  Bits 7 and 8 of INST1 select the
	 variant: LDMIA/POP (bit7 set, bit8 clear), LDMDB (bit7 clear,
	 bit8 set), RFEIA (both set) or RFEDB (both clear).  */
      int b7 = (inst1 >> 7) & 1;
      int b8 = (inst1 >> 8) & 1;

      if (b7 == b8)
	{
	  /* RFEIA or RFEDB always changes the PC.  */
	  return 1;
	}

      /* LDMIA/POP or LDMDB changes the PC iff PC is in the register
	 list (bit 15 of INST2).  */
      return (inst2 >> 15) & 1;
    }

  if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
    {
      /* MOV PC or MOVS PC.  */
      return 1;
    }

  if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
    {
      /* LDR PC.  */
      if ((inst1 & 0x000f) == 0x000f)	/* Rn is PC: literal load.  */
	return 1;
      if ((inst1 & 0x0080) != 0)	/* LDR.W immediate form.  */
	return 1;
      if ((inst2 & 0x0800) != 0)	/* Indexed addressing form.  */
	return 1;
      if ((inst2 & 0x0fc0) == 0x0000)	/* Register offset form.  */
	return 1;

      return 0;
    }

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xffe0) == 0xf000)
    {
      /* TBB or TBH.  */
      return 1;
    }

  return 0;
}
686
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  if (insn == 0x46bd)			/* mov sp, r7 */
    return 1;
  if ((insn & 0xff80) == 0xb000)	/* add sp, imm */
    return 1;
  if ((insn & 0xfe00) == 0xbc00)	/* pop <registers> */
    return 1;

  return 0;
}
697
698 /* Analyze a Thumb prologue, looking for a recognizable stack frame
699 and frame pointer. Scan until we encounter a store that could
700 clobber the stack frame unexpectedly, or an unknown instruction.
701 Return the last address which is definitely safe to skip for an
702 initial breakpoint. */
703
704 static CORE_ADDR
705 thumb_analyze_prologue (struct gdbarch *gdbarch,
706 CORE_ADDR start, CORE_ADDR limit,
707 struct arm_prologue_cache *cache)
708 {
709 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
710 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
711 int i;
712 pv_t regs[16];
713 struct pv_area *stack;
714 struct cleanup *back_to;
715 CORE_ADDR offset;
716 CORE_ADDR unrecognized_pc = 0;
717
718 for (i = 0; i < 16; i++)
719 regs[i] = pv_register (i, 0);
720 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
721 back_to = make_cleanup_free_pv_area (stack);
722
723 while (start < limit)
724 {
725 unsigned short insn;
726
727 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
728
729 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
730 {
731 int regno;
732 int mask;
733
734 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
735 break;
736
737 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
738 whether to save LR (R14). */
739 mask = (insn & 0xff) | ((insn & 0x100) << 6);
740
741 /* Calculate offsets of saved R0-R7 and LR. */
742 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
743 if (mask & (1 << regno))
744 {
745 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
746 -4);
747 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
748 }
749 }
750 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
751 {
752 offset = (insn & 0x7f) << 2; /* get scaled offset */
753 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
754 -offset);
755 }
756 else if (thumb_instruction_restores_sp (insn))
757 {
758 /* Don't scan past the epilogue. */
759 break;
760 }
761 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
762 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
763 (insn & 0xff) << 2);
764 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
765 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
766 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
767 bits (insn, 6, 8));
768 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
769 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
770 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
771 bits (insn, 0, 7));
772 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
773 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
774 && pv_is_constant (regs[bits (insn, 3, 5)]))
775 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
776 regs[bits (insn, 6, 8)]);
777 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
778 && pv_is_constant (regs[bits (insn, 3, 6)]))
779 {
780 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
781 int rm = bits (insn, 3, 6);
782 regs[rd] = pv_add (regs[rd], regs[rm]);
783 }
784 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
785 {
786 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
787 int src_reg = (insn & 0x78) >> 3;
788 regs[dst_reg] = regs[src_reg];
789 }
790 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
791 {
792 /* Handle stores to the stack. Normally pushes are used,
793 but with GCC -mtpcs-frame, there may be other stores
794 in the prologue to create the frame. */
795 int regno = (insn >> 8) & 0x7;
796 pv_t addr;
797
798 offset = (insn & 0xff) << 2;
799 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
800
801 if (pv_area_store_would_trash (stack, addr))
802 break;
803
804 pv_area_store (stack, addr, 4, regs[regno]);
805 }
806 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
807 {
808 int rd = bits (insn, 0, 2);
809 int rn = bits (insn, 3, 5);
810 pv_t addr;
811
812 offset = bits (insn, 6, 10) << 2;
813 addr = pv_add_constant (regs[rn], offset);
814
815 if (pv_area_store_would_trash (stack, addr))
816 break;
817
818 pv_area_store (stack, addr, 4, regs[rd]);
819 }
820 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
821 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
822 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
823 /* Ignore stores of argument registers to the stack. */
824 ;
825 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
826 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
827 /* Ignore block loads from the stack, potentially copying
828 parameters from memory. */
829 ;
830 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
831 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
832 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
833 /* Similarly ignore single loads from the stack. */
834 ;
835 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
836 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
837 /* Skip register copies, i.e. saves to another register
838 instead of the stack. */
839 ;
840 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
841 /* Recognize constant loads; even with small stacks these are necessary
842 on Thumb. */
843 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
844 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
845 {
846 /* Constant pool loads, for the same reason. */
847 unsigned int constant;
848 CORE_ADDR loc;
849
850 loc = start + 4 + bits (insn, 0, 7) * 4;
851 constant = read_memory_unsigned_integer (loc, 4, byte_order);
852 regs[bits (insn, 8, 10)] = pv_constant (constant);
853 }
854 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
855 {
856 unsigned short inst2;
857
858 inst2 = read_memory_unsigned_integer (start + 2, 2,
859 byte_order_for_code);
860
861 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
862 {
863 /* BL, BLX. Allow some special function calls when
864 skipping the prologue; GCC generates these before
865 storing arguments to the stack. */
866 CORE_ADDR nextpc;
867 int j1, j2, imm1, imm2;
868
869 imm1 = sbits (insn, 0, 10);
870 imm2 = bits (inst2, 0, 10);
871 j1 = bit (inst2, 13);
872 j2 = bit (inst2, 11);
873
874 offset = ((imm1 << 12) + (imm2 << 1));
875 offset ^= ((!j2) << 22) | ((!j1) << 23);
876
877 nextpc = start + 4 + offset;
878 /* For BLX make sure to clear the low bits. */
879 if (bit (inst2, 12) == 0)
880 nextpc = nextpc & 0xfffffffc;
881
882 if (!skip_prologue_function (gdbarch, nextpc,
883 bit (inst2, 12) != 0))
884 break;
885 }
886
887 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
888 { registers } */
889 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
890 {
891 pv_t addr = regs[bits (insn, 0, 3)];
892 int regno;
893
894 if (pv_area_store_would_trash (stack, addr))
895 break;
896
897 /* Calculate offsets of saved registers. */
898 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
899 if (inst2 & (1 << regno))
900 {
901 addr = pv_add_constant (addr, -4);
902 pv_area_store (stack, addr, 4, regs[regno]);
903 }
904
905 if (insn & 0x0020)
906 regs[bits (insn, 0, 3)] = addr;
907 }
908
909 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
910 [Rn, #+/-imm]{!} */
911 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
912 {
913 int regno1 = bits (inst2, 12, 15);
914 int regno2 = bits (inst2, 8, 11);
915 pv_t addr = regs[bits (insn, 0, 3)];
916
917 offset = inst2 & 0xff;
918 if (insn & 0x0080)
919 addr = pv_add_constant (addr, offset);
920 else
921 addr = pv_add_constant (addr, -offset);
922
923 if (pv_area_store_would_trash (stack, addr))
924 break;
925
926 pv_area_store (stack, addr, 4, regs[regno1]);
927 pv_area_store (stack, pv_add_constant (addr, 4),
928 4, regs[regno2]);
929
930 if (insn & 0x0020)
931 regs[bits (insn, 0, 3)] = addr;
932 }
933
934 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
935 && (inst2 & 0x0c00) == 0x0c00
936 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
937 {
938 int regno = bits (inst2, 12, 15);
939 pv_t addr = regs[bits (insn, 0, 3)];
940
941 offset = inst2 & 0xff;
942 if (inst2 & 0x0200)
943 addr = pv_add_constant (addr, offset);
944 else
945 addr = pv_add_constant (addr, -offset);
946
947 if (pv_area_store_would_trash (stack, addr))
948 break;
949
950 pv_area_store (stack, addr, 4, regs[regno]);
951
952 if (inst2 & 0x0100)
953 regs[bits (insn, 0, 3)] = addr;
954 }
955
956 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
957 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
958 {
959 int regno = bits (inst2, 12, 15);
960 pv_t addr;
961
962 offset = inst2 & 0xfff;
963 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
964
965 if (pv_area_store_would_trash (stack, addr))
966 break;
967
968 pv_area_store (stack, addr, 4, regs[regno]);
969 }
970
971 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
972 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
973 /* Ignore stores of argument registers to the stack. */
974 ;
975
976 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
977 && (inst2 & 0x0d00) == 0x0c00
978 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
979 /* Ignore stores of argument registers to the stack. */
980 ;
981
982 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
983 { registers } */
984 && (inst2 & 0x8000) == 0x0000
985 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
986 /* Ignore block loads from the stack, potentially copying
987 parameters from memory. */
988 ;
989
990 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
991 [Rn, #+/-imm] */
992 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
993 /* Similarly ignore dual loads from the stack. */
994 ;
995
996 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
997 && (inst2 & 0x0d00) == 0x0c00
998 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
999 /* Similarly ignore single loads from the stack. */
1000 ;
1001
1002 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1003 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1004 /* Similarly ignore single loads from the stack. */
1005 ;
1006
1007 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1008 && (inst2 & 0x8000) == 0x0000)
1009 {
1010 unsigned int imm = ((bits (insn, 10, 10) << 11)
1011 | (bits (inst2, 12, 14) << 8)
1012 | bits (inst2, 0, 7));
1013
1014 regs[bits (inst2, 8, 11)]
1015 = pv_add_constant (regs[bits (insn, 0, 3)],
1016 thumb_expand_immediate (imm));
1017 }
1018
1019 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1020 && (inst2 & 0x8000) == 0x0000)
1021 {
1022 unsigned int imm = ((bits (insn, 10, 10) << 11)
1023 | (bits (inst2, 12, 14) << 8)
1024 | bits (inst2, 0, 7));
1025
1026 regs[bits (inst2, 8, 11)]
1027 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1028 }
1029
1030 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1031 && (inst2 & 0x8000) == 0x0000)
1032 {
1033 unsigned int imm = ((bits (insn, 10, 10) << 11)
1034 | (bits (inst2, 12, 14) << 8)
1035 | bits (inst2, 0, 7));
1036
1037 regs[bits (inst2, 8, 11)]
1038 = pv_add_constant (regs[bits (insn, 0, 3)],
1039 - (CORE_ADDR) thumb_expand_immediate (imm));
1040 }
1041
1042 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1043 && (inst2 & 0x8000) == 0x0000)
1044 {
1045 unsigned int imm = ((bits (insn, 10, 10) << 11)
1046 | (bits (inst2, 12, 14) << 8)
1047 | bits (inst2, 0, 7));
1048
1049 regs[bits (inst2, 8, 11)]
1050 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1051 }
1052
1053 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1054 {
1055 unsigned int imm = ((bits (insn, 10, 10) << 11)
1056 | (bits (inst2, 12, 14) << 8)
1057 | bits (inst2, 0, 7));
1058
1059 regs[bits (inst2, 8, 11)]
1060 = pv_constant (thumb_expand_immediate (imm));
1061 }
1062
1063 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1064 {
1065 unsigned int imm
1066 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1067
1068 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1069 }
1070
1071 else if (insn == 0xea5f /* mov.w Rd,Rm */
1072 && (inst2 & 0xf0f0) == 0)
1073 {
1074 int dst_reg = (inst2 & 0x0f00) >> 8;
1075 int src_reg = inst2 & 0xf;
1076 regs[dst_reg] = regs[src_reg];
1077 }
1078
1079 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1080 {
1081 /* Constant pool loads. */
1082 unsigned int constant;
1083 CORE_ADDR loc;
1084
1085 offset = bits (inst2, 0, 11);
1086 if (insn & 0x0080)
1087 loc = start + 4 + offset;
1088 else
1089 loc = start + 4 - offset;
1090
1091 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1092 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1093 }
1094
1095 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1096 {
1097 /* Constant pool loads. */
1098 unsigned int constant;
1099 CORE_ADDR loc;
1100
1101 offset = bits (inst2, 0, 7) << 2;
1102 if (insn & 0x0080)
1103 loc = start + 4 + offset;
1104 else
1105 loc = start + 4 - offset;
1106
1107 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1108 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1109
1110 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1111 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1112 }
1113
1114 else if (thumb2_instruction_changes_pc (insn, inst2))
1115 {
1116 /* Don't scan past anything that might change control flow. */
1117 break;
1118 }
1119 else
1120 {
1121 /* The optimizer might shove anything into the prologue,
1122 so we just skip what we don't recognize. */
1123 unrecognized_pc = start;
1124 }
1125
1126 start += 2;
1127 }
1128 else if (thumb_instruction_changes_pc (insn))
1129 {
1130 /* Don't scan past anything that might change control flow. */
1131 break;
1132 }
1133 else
1134 {
1135 /* The optimizer might shove anything into the prologue,
1136 so we just skip what we don't recognize. */
1137 unrecognized_pc = start;
1138 }
1139
1140 start += 2;
1141 }
1142
1143 if (arm_debug)
1144 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1145 paddress (gdbarch, start));
1146
1147 if (unrecognized_pc == 0)
1148 unrecognized_pc = start;
1149
1150 if (cache == NULL)
1151 {
1152 do_cleanups (back_to);
1153 return unrecognized_pc;
1154 }
1155
1156 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1157 {
1158 /* Frame pointer is fp. Frame size is constant. */
1159 cache->framereg = ARM_FP_REGNUM;
1160 cache->framesize = -regs[ARM_FP_REGNUM].k;
1161 }
1162 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1163 {
1164 /* Frame pointer is r7. Frame size is constant. */
1165 cache->framereg = THUMB_FP_REGNUM;
1166 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1167 }
1168 else
1169 {
1170 /* Try the stack pointer... this is a bit desperate. */
1171 cache->framereg = ARM_SP_REGNUM;
1172 cache->framesize = -regs[ARM_SP_REGNUM].k;
1173 }
1174
1175 for (i = 0; i < 16; i++)
1176 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1177 cache->saved_regs[i].addr = offset;
1178
1179 do_cleanups (back_to);
1180 return unrecognized_pc;
1181 }
1182
1183
/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  Return the address of the instruction after loading
   this symbol, set the destination register number to *DESTREG, and set
   the size in bytes of the loading instructions in *OFFSET.  Return 0 if
   the instructions are not recognized.  */
1189
static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  /* ADDRESS stays 0 (the "not recognized" result) unless one of the
     known symbol-loading sequences below matches.  */
  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800)	/* ldr Rd, #immed */
	{
	  /* PC-relative literal load: fetch the word from the constant
	     pool.  The base is the PC aligned down to 4, plus the
	     4-byte pipeline offset.  */
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	{
	  /* Expect a movw/movt pair building the 32-bit address:
	     movw supplies the low half, movt the high half.  */
	  unsigned short insn2
	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  insn1
	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000)	/* ldr Rd, [PC, #immed] */
	{
	  /* ARM literal load; PC reads as the instruction address + 8.  */
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000)	/* movw Rd, #const */
	{
	  /* ARM movw/movt pair, low half first.  */
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}
1267
1268 /* Try to skip a sequence of instructions used for stack protector. If PC
1269 points to the first instruction of this sequence, return the address of
1270 first instruction after this sequence, otherwise, return original PC.
1271
1272 On arm, this sequence of instructions is composed of mainly three steps,
1273 Step 1: load symbol __stack_chk_guard,
1274 Step 2: load from address of __stack_chk_guard,
1275 Step 3: store it to somewhere else.
1276
1277 Usually, instructions on step 2 and step 3 are the same on various ARM
1278 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1279 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1280 instructions in step 1 vary from different ARM architectures. On ARMv7,
1281 they are,
1282
1283 movw Rn, #:lower16:__stack_chk_guard
1284 movt Rn, #:upper16:__stack_chk_guard
1285
1286 On ARMv5t, it is,
1287
1288 ldr Rn, .Label
1289 ....
   .Label:
1291 .word __stack_chk_guard
1292
1293 Since ldr/str is a very popular instruction, we can't use them as
1294 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1295 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */
1297
static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  Any failure below
     returns the original PC, meaning "no stack protector sequence
     recognized here".  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
    return pc;

  /* Steps 2 and 3: verify that the guard value is loaded through the
     register set up in step 1 and then stored away.  */
  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_memory_unsigned_integer (pc + offset + 2, 2,
					   byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  The stored register
	 must be the one just loaded.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_memory_unsigned_integer (pc + offset + 4,
					   4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
1369
1370 /* Advance the PC across any function entry prologue instructions to
1371 reach some "real" code.
1372
1373 The APCS (ARM Procedure Call Standard) defines the following
1374 prologue:
1375
1376 mov ip, sp
1377 [stmfd sp!, {a1,a2,a3,a4}]
1378 stmfd sp!, {...,fp,ip,lr,pc}
1379 [stfe f7, [sp, #-12]!]
1380 [stfe f6, [sp, #-12]!]
1381 [stfe f5, [sp, #-12]!]
1382 [stfe f4, [sp, #-12]!]
1383 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1384
1385 static CORE_ADDR
1386 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1387 {
1388 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1389 unsigned long inst;
1390 CORE_ADDR func_addr, limit_pc;
1391
1392 /* See if we can determine the end of the prologue via the symbol table.
1393 If so, then return either PC, or the PC after the prologue, whichever
1394 is greater. */
1395 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1396 {
1397 CORE_ADDR post_prologue_pc
1398 = skip_prologue_using_sal (gdbarch, func_addr);
1399 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1400
1401 if (post_prologue_pc)
1402 post_prologue_pc
1403 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1404
1405
1406 /* GCC always emits a line note before the prologue and another
1407 one after, even if the two are at the same address or on the
1408 same line. Take advantage of this so that we do not need to
1409 know every instruction that might appear in the prologue. We
1410 will have producer information for most binaries; if it is
1411 missing (e.g. for -gstabs), assuming the GNU tools. */
1412 if (post_prologue_pc
1413 && (cust == NULL
1414 || COMPUNIT_PRODUCER (cust) == NULL
1415 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1416 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1417 return post_prologue_pc;
1418
1419 if (post_prologue_pc != 0)
1420 {
1421 CORE_ADDR analyzed_limit;
1422
1423 /* For non-GCC compilers, make sure the entire line is an
1424 acceptable prologue; GDB will round this function's
1425 return value up to the end of the following line so we
1426 can not skip just part of a line (and we do not want to).
1427
1428 RealView does not treat the prologue specially, but does
1429 associate prologue code with the opening brace; so this
1430 lets us skip the first line if we think it is the opening
1431 brace. */
1432 if (arm_pc_is_thumb (gdbarch, func_addr))
1433 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1434 post_prologue_pc, NULL);
1435 else
1436 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1437 post_prologue_pc, NULL);
1438
1439 if (analyzed_limit != post_prologue_pc)
1440 return func_addr;
1441
1442 return post_prologue_pc;
1443 }
1444 }
1445
1446 /* Can't determine prologue from the symbol table, need to examine
1447 instructions. */
1448
1449 /* Find an upper limit on the function prologue using the debug
1450 information. If the debug information could not be used to provide
1451 that bound, then use an arbitrary large number as the upper bound. */
1452 /* Like arm_scan_prologue, stop no later than pc + 64. */
1453 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1454 if (limit_pc == 0)
1455 limit_pc = pc + 64; /* Magic. */
1456
1457
1458 /* Check if this is Thumb code. */
1459 if (arm_pc_is_thumb (gdbarch, pc))
1460 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1461 else
1462 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1463 }
1464
1465 /* *INDENT-OFF* */
1466 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1467 This function decodes a Thumb function prologue to determine:
1468 1) the size of the stack frame
1469 2) which registers are saved on it
1470 3) the offsets of saved regs
1471 4) the offset from the stack pointer to the frame pointer
1472
1473 A typical Thumb function prologue would create this stack frame
1474 (offsets relative to FP)
1475 old SP -> 24 stack parameters
1476 20 LR
1477 16 R7
1478 R7 -> 0 local variables (16 bytes)
1479 SP -> -12 additional stack space (12 bytes)
1480 The frame size would thus be 36 bytes, and the frame offset would be
1481 12 bytes. The frame register is R7.
1482
1483 The comments for thumb_skip_prolog() describe the algorithm we use
1484 to detect the end of the prolog. */
1485 /* *INDENT-ON* */
1486
1487 static void
1488 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1489 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1490 {
1491 CORE_ADDR prologue_start;
1492 CORE_ADDR prologue_end;
1493
1494 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1495 &prologue_end))
1496 {
1497 /* See comment in arm_scan_prologue for an explanation of
1498 this heuristics. */
1499 if (prologue_end > prologue_start + 64)
1500 {
1501 prologue_end = prologue_start + 64;
1502 }
1503 }
1504 else
1505 /* We're in the boondocks: we have no idea where the start of the
1506 function is. */
1507 return;
1508
1509 prologue_end = min (prologue_end, prev_pc);
1510
1511 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1512 }
1513
1514 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1515
static int
arm_instruction_changes_pc (uint32_t this_instr)
{
  if (bits (this_instr, 28, 31) == INST_NV)
    /* Unconditional instructions.  */
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	/* Branch with Link and change to Thumb.  */
	return 1;
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	return 0;
      default:
	return 0;
      }
  else
    /* Conditional instructions: dispatch on the major opcode field.  */
    switch (bits (this_instr, 25, 27))
      {
      case 0x0:
	if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
	  {
	    /* Multiplies and extra load/stores.  */
	    if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
	      /* Neither multiplies nor extension load/stores are allowed
		 to modify PC.  */
	      return 0;

	    /* Otherwise, miscellaneous instructions.  */

	    /* BX <reg>, BXJ <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff2
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      return 1;

	    /* Other miscellaneous instructions are unpredictable if they
	       modify PC.  */
	    return 0;
	  }
	/* Data processing instruction.  Fall through.  */

      case 0x1:
	/* Data processing: changes PC only when the destination
	   register (bits 12-15) is the PC.  */
	if (bits (this_instr, 12, 15) == 15)
	  return 1;
	else
	  return 0;

      case 0x2:
      case 0x3:
	/* Media instructions and architecturally undefined instructions.  */
	if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
	  return 0;

	/* Stores.  */
	if (bit (this_instr, 20) == 0)
	  return 0;

	/* Loads.  */
	if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
	  return 1;
	else
	  return 0;

      case 0x4:
	/* Load/store multiple.  A load (bit 20) whose register list
	   includes the PC (bit 15) changes control flow.  */
	if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
	  return 1;
	else
	  return 0;

      case 0x5:
	/* Branch and branch with link.  */
	return 1;

      case 0x6:
      case 0x7:
	/* Coprocessor transfers or SWIs can not affect PC.  */
	return 0;

      default:
	internal_error (__FILE__, __LINE__, _("bad value in switch"));
      }
}
1605
1606 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1607 otherwise. */
1608
1609 static int
1610 arm_instruction_restores_sp (unsigned int insn)
1611 {
1612 if (bits (insn, 28, 31) != INST_NV)
1613 {
1614 if ((insn & 0x0df0f000) == 0x0080d000
1615 /* ADD SP (register or immediate). */
1616 || (insn & 0x0df0f000) == 0x0040d000
1617 /* SUB SP (register or immediate). */
1618 || (insn & 0x0ffffff0) == 0x01a0d000
1619 /* MOV SP. */
1620 || (insn & 0x0fff0000) == 0x08bd0000
1621 /* POP (LDMIA). */
1622 || (insn & 0x0fff0000) == 0x049d0000)
1623 /* POP of a single register. */
1624 return 1;
1625 }
1626
1627 return 0;
1628 }
1629
1630 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1631 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1632 fill it in. Return the first address not recognized as a prologue
1633 instruction.
1634
1635 We recognize all the instructions typically found in ARM prologues,
1636 plus harmless instructions which can be skipped (either for analysis
1637 purposes, or a more restrictive set that can be skipped when finding
1638 the end of the prologue). */
1639
1640 static CORE_ADDR
1641 arm_analyze_prologue (struct gdbarch *gdbarch,
1642 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1643 struct arm_prologue_cache *cache)
1644 {
1645 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1646 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1647 int regno;
1648 CORE_ADDR offset, current_pc;
1649 pv_t regs[ARM_FPS_REGNUM];
1650 struct pv_area *stack;
1651 struct cleanup *back_to;
1652 CORE_ADDR unrecognized_pc = 0;
1653
1654 /* Search the prologue looking for instructions that set up the
1655 frame pointer, adjust the stack pointer, and save registers.
1656
1657 Be careful, however, and if it doesn't look like a prologue,
1658 don't try to scan it. If, for instance, a frameless function
1659 begins with stmfd sp!, then we will tell ourselves there is
1660 a frame, which will confuse stack traceback, as well as "finish"
1661 and other operations that rely on a knowledge of the stack
1662 traceback. */
1663
1664 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1665 regs[regno] = pv_register (regno, 0);
1666 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1667 back_to = make_cleanup_free_pv_area (stack);
1668
1669 for (current_pc = prologue_start;
1670 current_pc < prologue_end;
1671 current_pc += 4)
1672 {
1673 unsigned int insn
1674 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1675
1676 if (insn == 0xe1a0c00d) /* mov ip, sp */
1677 {
1678 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1679 continue;
1680 }
1681 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1682 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1683 {
1684 unsigned imm = insn & 0xff; /* immediate value */
1685 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1686 int rd = bits (insn, 12, 15);
1687 imm = (imm >> rot) | (imm << (32 - rot));
1688 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1689 continue;
1690 }
1691 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1692 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1693 {
1694 unsigned imm = insn & 0xff; /* immediate value */
1695 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1696 int rd = bits (insn, 12, 15);
1697 imm = (imm >> rot) | (imm << (32 - rot));
1698 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1699 continue;
1700 }
1701 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1702 [sp, #-4]! */
1703 {
1704 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1705 break;
1706 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1707 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1708 regs[bits (insn, 12, 15)]);
1709 continue;
1710 }
1711 else if ((insn & 0xffff0000) == 0xe92d0000)
1712 /* stmfd sp!, {..., fp, ip, lr, pc}
1713 or
1714 stmfd sp!, {a1, a2, a3, a4} */
1715 {
1716 int mask = insn & 0xffff;
1717
1718 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1719 break;
1720
1721 /* Calculate offsets of saved registers. */
1722 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1723 if (mask & (1 << regno))
1724 {
1725 regs[ARM_SP_REGNUM]
1726 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1727 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1728 }
1729 }
1730 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1731 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1732 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1733 {
1734 /* No need to add this to saved_regs -- it's just an arg reg. */
1735 continue;
1736 }
1737 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1738 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1739 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1740 {
1741 /* No need to add this to saved_regs -- it's just an arg reg. */
1742 continue;
1743 }
1744 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1745 { registers } */
1746 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1747 {
1748 /* No need to add this to saved_regs -- it's just arg regs. */
1749 continue;
1750 }
1751 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1752 {
1753 unsigned imm = insn & 0xff; /* immediate value */
1754 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1755 imm = (imm >> rot) | (imm << (32 - rot));
1756 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1757 }
1758 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1759 {
1760 unsigned imm = insn & 0xff; /* immediate value */
1761 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1762 imm = (imm >> rot) | (imm << (32 - rot));
1763 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1764 }
1765 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1766 [sp, -#c]! */
1767 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1768 {
1769 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1770 break;
1771
1772 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1773 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1774 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1775 }
1776 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1777 [sp!] */
1778 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1779 {
1780 int n_saved_fp_regs;
1781 unsigned int fp_start_reg, fp_bound_reg;
1782
1783 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1784 break;
1785
1786 if ((insn & 0x800) == 0x800) /* N0 is set */
1787 {
1788 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1789 n_saved_fp_regs = 3;
1790 else
1791 n_saved_fp_regs = 1;
1792 }
1793 else
1794 {
1795 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1796 n_saved_fp_regs = 2;
1797 else
1798 n_saved_fp_regs = 4;
1799 }
1800
1801 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1802 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1803 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1804 {
1805 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1806 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1807 regs[fp_start_reg++]);
1808 }
1809 }
1810 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1811 {
1812 /* Allow some special function calls when skipping the
1813 prologue; GCC generates these before storing arguments to
1814 the stack. */
1815 CORE_ADDR dest = BranchDest (current_pc, insn);
1816
1817 if (skip_prologue_function (gdbarch, dest, 0))
1818 continue;
1819 else
1820 break;
1821 }
1822 else if ((insn & 0xf0000000) != 0xe0000000)
1823 break; /* Condition not true, exit early. */
1824 else if (arm_instruction_changes_pc (insn))
1825 /* Don't scan past anything that might change control flow. */
1826 break;
1827 else if (arm_instruction_restores_sp (insn))
1828 {
1829 /* Don't scan past the epilogue. */
1830 break;
1831 }
1832 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1833 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1834 /* Ignore block loads from the stack, potentially copying
1835 parameters from memory. */
1836 continue;
1837 else if ((insn & 0xfc500000) == 0xe4100000
1838 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1839 /* Similarly ignore single loads from the stack. */
1840 continue;
1841 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1842 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1843 register instead of the stack. */
1844 continue;
1845 else
1846 {
1847 /* The optimizer might shove anything into the prologue, if
1848 we build up cache (cache != NULL) from scanning prologue,
1849 we just skip what we don't recognize and scan further to
1850 make cache as complete as possible. However, if we skip
1851 prologue, we'll stop immediately on unrecognized
1852 instruction. */
1853 unrecognized_pc = current_pc;
1854 if (cache != NULL)
1855 continue;
1856 else
1857 break;
1858 }
1859 }
1860
1861 if (unrecognized_pc == 0)
1862 unrecognized_pc = current_pc;
1863
1864 if (cache)
1865 {
1866 int framereg, framesize;
1867
1868 /* The frame size is just the distance from the frame register
1869 to the original stack pointer. */
1870 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1871 {
1872 /* Frame pointer is fp. */
1873 framereg = ARM_FP_REGNUM;
1874 framesize = -regs[ARM_FP_REGNUM].k;
1875 }
1876 else
1877 {
1878 /* Try the stack pointer... this is a bit desperate. */
1879 framereg = ARM_SP_REGNUM;
1880 framesize = -regs[ARM_SP_REGNUM].k;
1881 }
1882
1883 cache->framereg = framereg;
1884 cache->framesize = framesize;
1885
1886 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1887 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1888 cache->saved_regs[regno].addr = offset;
1889 }
1890
1891 if (arm_debug)
1892 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1893 paddress (gdbarch, unrecognized_pc));
1894
1895 do_cleanups (back_to);
1896 return unrecognized_pc;
1897 }
1898
1899 static void
1900 arm_scan_prologue (struct frame_info *this_frame,
1901 struct arm_prologue_cache *cache)
1902 {
1903 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1904 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1905 int regno;
1906 CORE_ADDR prologue_start, prologue_end, current_pc;
1907 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1908 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1909 pv_t regs[ARM_FPS_REGNUM];
1910 struct pv_area *stack;
1911 struct cleanup *back_to;
1912 CORE_ADDR offset;
1913
1914 /* Assume there is no frame until proven otherwise. */
1915 cache->framereg = ARM_SP_REGNUM;
1916 cache->framesize = 0;
1917
1918 /* Check for Thumb prologue. */
1919 if (arm_frame_is_thumb (this_frame))
1920 {
1921 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1922 return;
1923 }
1924
1925 /* Find the function prologue. If we can't find the function in
1926 the symbol table, peek in the stack frame to find the PC. */
1927 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1928 &prologue_end))
1929 {
1930 /* One way to find the end of the prologue (which works well
1931 for unoptimized code) is to do the following:
1932
1933 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1934
1935 if (sal.line == 0)
1936 prologue_end = prev_pc;
1937 else if (sal.end < prologue_end)
1938 prologue_end = sal.end;
1939
1940 This mechanism is very accurate so long as the optimizer
1941 doesn't move any instructions from the function body into the
1942 prologue. If this happens, sal.end will be the last
1943 instruction in the first hunk of prologue code just before
1944 the first instruction that the scheduler has moved from
1945 the body to the prologue.
1946
1947 In order to make sure that we scan all of the prologue
1948 instructions, we use a slightly less accurate mechanism which
1949 may scan more than necessary. To help compensate for this
1950 lack of accuracy, the prologue scanning loop below contains
1951 several clauses which'll cause the loop to terminate early if
1952 an implausible prologue instruction is encountered.
1953
1954 The expression
1955
1956 prologue_start + 64
1957
1958 is a suitable endpoint since it accounts for the largest
1959 possible prologue plus up to five instructions inserted by
1960 the scheduler. */
1961
1962 if (prologue_end > prologue_start + 64)
1963 {
1964 prologue_end = prologue_start + 64; /* See above. */
1965 }
1966 }
1967 else
1968 {
1969 /* We have no symbol information. Our only option is to assume this
1970 function has a standard stack frame and the normal frame register.
1971 Then, we can find the value of our frame pointer on entrance to
1972 the callee (or at the present moment if this is the innermost frame).
1973 The value stored there should be the address of the stmfd + 8. */
1974 CORE_ADDR frame_loc;
1975 LONGEST return_value;
1976
1977 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1978 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1979 return;
1980 else
1981 {
1982 prologue_start = gdbarch_addr_bits_remove
1983 (gdbarch, return_value) - 8;
1984 prologue_end = prologue_start + 64; /* See above. */
1985 }
1986 }
1987
1988 if (prev_pc < prologue_end)
1989 prologue_end = prev_pc;
1990
1991 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1992 }
1993
1994 static struct arm_prologue_cache *
1995 arm_make_prologue_cache (struct frame_info *this_frame)
1996 {
1997 int reg;
1998 struct arm_prologue_cache *cache;
1999 CORE_ADDR unwound_fp;
2000
2001 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2002 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2003
2004 arm_scan_prologue (this_frame, cache);
2005
2006 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2007 if (unwound_fp == 0)
2008 return cache;
2009
2010 cache->prev_sp = unwound_fp + cache->framesize;
2011
2012 /* Calculate actual addresses of saved registers using offsets
2013 determined by arm_scan_prologue. */
2014 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2015 if (trad_frame_addr_p (cache->saved_regs, reg))
2016 cache->saved_regs[reg].addr += cache->prev_sp;
2017
2018 return cache;
2019 }
2020
2021 /* Implementation of the stop_reason hook for arm_prologue frames. */
2022
2023 static enum unwind_stop_reason
2024 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
2025 void **this_cache)
2026 {
2027 struct arm_prologue_cache *cache;
2028 CORE_ADDR pc;
2029
2030 if (*this_cache == NULL)
2031 *this_cache = arm_make_prologue_cache (this_frame);
2032 cache = (struct arm_prologue_cache *) *this_cache;
2033
2034 /* This is meant to halt the backtrace at "_start". */
2035 pc = get_frame_pc (this_frame);
2036 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2037 return UNWIND_OUTERMOST;
2038
2039 /* If we've hit a wall, stop. */
2040 if (cache->prev_sp == 0)
2041 return UNWIND_OUTERMOST;
2042
2043 return UNWIND_NO_REASON;
2044 }
2045
2046 /* Our frame ID for a normal frame is the current function's starting PC
2047 and the caller's SP when we were called. */
2048
2049 static void
2050 arm_prologue_this_id (struct frame_info *this_frame,
2051 void **this_cache,
2052 struct frame_id *this_id)
2053 {
2054 struct arm_prologue_cache *cache;
2055 struct frame_id id;
2056 CORE_ADDR pc, func;
2057
2058 if (*this_cache == NULL)
2059 *this_cache = arm_make_prologue_cache (this_frame);
2060 cache = (struct arm_prologue_cache *) *this_cache;
2061
2062 /* Use function start address as part of the frame ID. If we cannot
2063 identify the start address (due to missing symbol information),
2064 fall back to just using the current PC. */
2065 pc = get_frame_pc (this_frame);
2066 func = get_frame_func (this_frame);
2067 if (!func)
2068 func = pc;
2069
2070 id = frame_id_build (cache->prev_sp, func);
2071 *this_id = id;
2072 }
2073
2074 static struct value *
2075 arm_prologue_prev_register (struct frame_info *this_frame,
2076 void **this_cache,
2077 int prev_regnum)
2078 {
2079 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2080 struct arm_prologue_cache *cache;
2081
2082 if (*this_cache == NULL)
2083 *this_cache = arm_make_prologue_cache (this_frame);
2084 cache = (struct arm_prologue_cache *) *this_cache;
2085
2086 /* If we are asked to unwind the PC, then we need to return the LR
2087 instead. The prologue may save PC, but it will point into this
2088 frame's prologue, not the next frame's resume location. Also
2089 strip the saved T bit. A valid LR may have the low bit set, but
2090 a valid PC never does. */
2091 if (prev_regnum == ARM_PC_REGNUM)
2092 {
2093 CORE_ADDR lr;
2094
2095 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2096 return frame_unwind_got_constant (this_frame, prev_regnum,
2097 arm_addr_bits_remove (gdbarch, lr));
2098 }
2099
2100 /* SP is generally not saved to the stack, but this frame is
2101 identified by the next frame's stack pointer at the time of the call.
2102 The value was already reconstructed into PREV_SP. */
2103 if (prev_regnum == ARM_SP_REGNUM)
2104 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2105
2106 /* The CPSR may have been changed by the call instruction and by the
2107 called function. The only bit we can reconstruct is the T bit,
2108 by checking the low bit of LR as of the call. This is a reliable
2109 indicator of Thumb-ness except for some ARM v4T pre-interworking
2110 Thumb code, which could get away with a clear low bit as long as
2111 the called function did not use bx. Guess that all other
2112 bits are unchanged; the condition flags are presumably lost,
2113 but the processor status is likely valid. */
2114 if (prev_regnum == ARM_PS_REGNUM)
2115 {
2116 CORE_ADDR lr, cpsr;
2117 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2118
2119 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2120 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2121 if (IS_THUMB_ADDR (lr))
2122 cpsr |= t_bit;
2123 else
2124 cpsr &= ~t_bit;
2125 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2126 }
2127
2128 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2129 prev_regnum);
2130 }
2131
/* Unwinder for normal ARM frames based on prologue analysis; accepted
   by the generic default_frame_sniffer.  */

struct frame_unwind arm_prologue_unwind = {
  NORMAL_FRAME,
  arm_prologue_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  default_frame_sniffer
};
2140
2141 /* Maintain a list of ARM exception table entries per objfile, similar to the
2142 list of mapping symbols. We only cache entries for standard ARM-defined
2143 personality routines; the cache will contain only the frame unwinding
2144 instructions associated with the entry (not the descriptors). */
2145
/* Per-objfile registry key under which the exception table cache is
   stored.  */
static const struct objfile_data *arm_exidx_data_key;

/* One cached exception table entry: ADDR is the section-relative start
   address of the region covered, ENTRY points to the normalized unwind
   instructions (NULL when the index marks the region as
   EXIDX_CANTUNWIND).  */
struct arm_exidx_entry
{
  bfd_vma addr;
  gdb_byte *entry;
};
typedef struct arm_exidx_entry arm_exidx_entry_s;
DEF_VEC_O(arm_exidx_entry_s);

/* Per-objfile cache: one vector of entries per BFD section, indexed by
   the section's index.  */
struct arm_exidx_data
{
  VEC(arm_exidx_entry_s) **section_maps;
};
2160
2161 static void
2162 arm_exidx_data_free (struct objfile *objfile, void *arg)
2163 {
2164 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
2165 unsigned int i;
2166
2167 for (i = 0; i < objfile->obfd->section_count; i++)
2168 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2169 }
2170
2171 static inline int
2172 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2173 const struct arm_exidx_entry *rhs)
2174 {
2175 return lhs->addr < rhs->addr;
2176 }
2177
2178 static struct obj_section *
2179 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2180 {
2181 struct obj_section *osect;
2182
2183 ALL_OBJFILE_OSECTIONS (objfile, osect)
2184 if (bfd_get_section_flags (objfile->obfd,
2185 osect->the_bfd_section) & SEC_ALLOC)
2186 {
2187 bfd_vma start, size;
2188 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2189 size = bfd_get_section_size (osect->the_bfd_section);
2190
2191 if (start <= vma && vma < start + size)
2192 return osect;
2193 }
2194
2195 return NULL;
2196 }
2197
2198 /* Parse contents of exception table and exception index sections
2199 of OBJFILE, and fill in the exception table entry cache.
2200
2201 For each entry that refers to a standard ARM-defined personality
2202 routine, extract the frame unwinding instructions (from either
2203 the index or the table section). The unwinding instructions
2204 are normalized by:
2205 - extracting them from the rest of the table data
2206 - converting to host endianness
2207 - appending the implicit 0xb0 ("Finish") code
2208
2209 The extracted and normalized instructions are stored for later
2210 retrieval by the arm_find_exidx_entry routine. */
2211
static void
arm_exidx_new_objfile (struct objfile *objfile)
{
  struct cleanup *cleanups;
  struct arm_exidx_data *data;
  asection *exidx, *extab;
  bfd_vma exidx_vma = 0, extab_vma = 0;
  bfd_size_type exidx_size = 0, extab_size = 0;
  gdb_byte *exidx_data = NULL, *extab_data = NULL;
  LONGEST i;

  /* If we've already touched this file, do nothing.  */
  if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
    return;
  cleanups = make_cleanup (null_cleanup, NULL);

  /* Read contents of exception table and index.  */
  exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
  if (exidx)
    {
      exidx_vma = bfd_section_vma (objfile->obfd, exidx);
      exidx_size = bfd_get_section_size (exidx);
      exidx_data = (gdb_byte *) xmalloc (exidx_size);
      make_cleanup (xfree, exidx_data);

      if (!bfd_get_section_contents (objfile->obfd, exidx,
				     exidx_data, 0, exidx_size))
	{
	  do_cleanups (cleanups);
	  return;
	}
    }

  extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
  if (extab)
    {
      extab_vma = bfd_section_vma (objfile->obfd, extab);
      extab_size = bfd_get_section_size (extab);
      extab_data = (gdb_byte *) xmalloc (extab_size);
      make_cleanup (xfree, extab_data);

      if (!bfd_get_section_contents (objfile->obfd, extab,
				     extab_data, 0, extab_size))
	{
	  do_cleanups (cleanups);
	  return;
	}
    }

  /* Allocate exception table data structure.  Both the structure and
     the per-section vectors live on the objfile obstack, matching the
     objfile's lifetime.  */
  data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
  set_objfile_data (objfile, arm_exidx_data_key, data);
  data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
				       objfile->obfd->section_count,
				       VEC(arm_exidx_entry_s) *);

  /* Fill in exception table.  Each index entry is a pair of 32-bit
     words: a self-relative offset to the function start, and either
     an inline table entry, an offset to the table entry in .ARM.extab,
     or the EXIDX_CANTUNWIND marker.  */
  for (i = 0; i < exidx_size / 8; i++)
    {
      struct arm_exidx_entry new_exidx_entry;
      bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
      bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
      bfd_vma addr = 0, word = 0;
      int n_bytes = 0, n_words = 0;
      struct obj_section *sec;
      gdb_byte *entry = NULL;

      /* Extract address of start of function.  The first word holds a
	 31-bit self-relative offset; the xor/subtract sign-extends
	 bit 30, and the entry's own address is then added.  */
      idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
      idx += exidx_vma + i * 8;

      /* Find section containing function and compute section offset.  */
      sec = arm_obj_section_from_vma (objfile, idx);
      if (sec == NULL)
	continue;
      idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);

      /* Determine address of exception table entry.  */
      if (val == 1)
	{
	  /* EXIDX_CANTUNWIND -- no exception table entry present.
	     ENTRY stays NULL; the region is still recorded below.  */
	}
      else if ((val & 0xff000000) == 0x80000000)
	{
	  /* Exception table entry embedded in .ARM.exidx
	     -- must be short form.  */
	  word = val;
	  n_bytes = 3;
	}
      else if (!(val & 0x80000000))
	{
	  /* Exception table entry in .ARM.extab.  The second word is
	     again a sign-extended 31-bit self-relative offset.  */
	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
	  addr += exidx_vma + i * 8 + 4;

	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data + addr - extab_vma);
	      addr += 4;

	      if ((word & 0xff000000) == 0x80000000)
		{
		  /* Short form.  */
		  n_bytes = 3;
		}
	      else if ((word & 0xff000000) == 0x81000000
		       || (word & 0xff000000) == 0x82000000)
		{
		  /* Long form.  Word count is in bits 16..23.  */
		  n_bytes = 2;
		  n_words = ((word >> 16) & 0xff);
		}
	      else if (!(word & 0x80000000))
		{
		  bfd_vma pers;
		  struct obj_section *pers_sec;
		  int gnu_personality = 0;

		  /* Custom personality routine.  The word is once more
		     a sign-extended 31-bit self-relative offset, this
		     time to the routine itself.  */
		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);

		  /* Check whether we've got one of the variants of the
		     GNU personality routines.  */
		  pers_sec = arm_obj_section_from_vma (objfile, pers);
		  if (pers_sec)
		    {
		      static const char *personality[] =
			{
			  "__gcc_personality_v0",
			  "__gxx_personality_v0",
			  "__gcj_personality_v0",
			  "__gnu_objc_personality_v0",
			  NULL
			};

		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
		      int k;

		      for (k = 0; personality[k]; k++)
			if (lookup_minimal_symbol_by_pc_name
			    (pc, personality[k], objfile))
			  {
			    gnu_personality = 1;
			    break;
			  }
		    }

		  /* If so, the next word contains a word count in the high
		     byte, followed by the same unwind instructions as the
		     pre-defined forms.  */
		  if (gnu_personality
		      && addr + 4 <= extab_vma + extab_size)
		    {
		      word = bfd_h_get_32 (objfile->obfd,
					   extab_data + addr - extab_vma);
		      addr += 4;
		      n_bytes = 3;
		      n_words = ((word >> 24) & 0xff);
		    }
		}
	    }
	}

      /* Sanity check address.  Drop the entry's instructions if they
	 would run past the end of .ARM.extab.  */
      if (n_words)
	if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
	  n_words = n_bytes = 0;

      /* The unwind instructions reside in WORD (only the N_BYTES least
	 significant bytes are valid), followed by N_WORDS words in the
	 extab section starting at ADDR.  */
      if (n_bytes || n_words)
	{
	  gdb_byte *p = entry
	    = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
					  n_bytes + n_words * 4 + 1);

	  /* Copy the in-WORD bytes most-significant first; N_BYTES has
	     already been decremented when the shift count is formed.  */
	  while (n_bytes--)
	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);

	  while (n_words--)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data + addr - extab_vma);
	      addr += 4;

	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
	      *p++ = (gdb_byte) (word & 0xff);
	    }

	  /* Implied "Finish" to terminate the list.  */
	  *p++ = 0xb0;
	}

      /* Push entry onto vector.  They are guaranteed to always
	 appear in order of increasing addresses.  */
      new_exidx_entry.addr = idx;
      new_exidx_entry.entry = entry;
      VEC_safe_push (arm_exidx_entry_s,
		     data->section_maps[sec->the_bfd_section->index],
		     &new_exidx_entry);
    }

  do_cleanups (cleanups);
}
2421
2422 /* Search for the exception table entry covering MEMADDR. If one is found,
2423 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2424 set *START to the start of the region covered by this entry. */
2425
static gdb_byte *
arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_exidx_data *data;
      VEC(arm_exidx_entry_s) *map;
      /* Cached entry addresses are section-relative, so convert
	 MEMADDR likewise before searching.  */
      struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
      unsigned int idx;

      data = ((struct arm_exidx_data *)
	      objfile_data (sec->objfile, arm_exidx_data_key));
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_exidx_entry_s, map))
	    {
	      struct arm_exidx_entry *map_sym;

	      idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
				     arm_compare_exidx_entries);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 exception table entry covers this address.  */
	      if (idx < VEC_length (arm_exidx_entry_s, map))
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx);
		  if (map_sym->addr == map_key.addr)
		    {
		      if (start)
			*start = map_sym->addr + obj_section_addr (sec);
		      return map_sym->entry;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
		  if (start)
		    *start = map_sym->addr + obj_section_addr (sec);
		  return map_sym->entry;
		}
	    }
	}
    }

  /* No section, no cached data, or MEMADDR precedes the first cached
     entry of its section.  */
  return NULL;
}
2479
2480 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2481 instruction list from the ARM exception table entry ENTRY, allocate and
2482 return a prologue cache structure describing how to unwind this frame.
2483
2484 Return NULL if the unwinding instruction list contains a "spare",
2485 "reserved" or "refuse to unwind" instruction as defined in section
2486 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2487 for the ARM Architecture" document. */
2488
static struct arm_prologue_cache *
arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
{
  /* VSP is the EHABI "virtual stack pointer" tracked while executing
     the unwind instructions; VSP_VALID is clear whenever it must be
     re-derived from the current frame's state.  */
  CORE_ADDR vsp = 0;
  int vsp_valid = 0;

  struct arm_prologue_cache *cache;
  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  for (;;)
    {
      gdb_byte insn;

      /* Whenever we reload SP, we actually have to retrieve its
	 actual value in the current frame.  */
      if (!vsp_valid)
	{
	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
	    {
	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
	      vsp = get_frame_register_unsigned (this_frame, reg);
	    }
	  else
	    {
	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
	    }

	  vsp_valid = 1;
	}

      /* Decode next unwind instruction.  */
      insn = *entry++;

      /* 00xxxxxx: vsp = vsp + (xxxxxx << 2) + 4.  */
      if ((insn & 0xc0) == 0)
	{
	  int offset = insn & 0x3f;
	  vsp += (offset << 2) + 4;
	}
      /* 01xxxxxx: vsp = vsp - (xxxxxx << 2) - 4.  */
      else if ((insn & 0xc0) == 0x40)
	{
	  int offset = insn & 0x3f;
	  vsp -= (offset << 2) + 4;
	}
      /* 1000iiii iiiiiiii: pop r4..r15 under 12-bit mask.  */
      else if ((insn & 0xf0) == 0x80)
	{
	  int mask = ((insn & 0xf) << 8) | *entry++;
	  int i;

	  /* The special case of an all-zero mask identifies
	     "Refuse to unwind".  We return NULL to fall back
	     to the prologue analyzer.  */
	  if (mask == 0)
	    return NULL;

	  /* Pop registers r4..r15 under mask.  */
	  for (i = 0; i < 12; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[4 + i].addr = vsp;
		vsp += 4;
	      }

	  /* Special-case popping SP -- we need to reload vsp.  */
	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
	    vsp_valid = 0;
	}
      /* 1001nnnn: set vsp = r[nnnn].  */
      else if ((insn & 0xf0) == 0x90)
	{
	  int reg = insn & 0xf;

	  /* Reserved cases.  */
	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
	    return NULL;

	  /* Set SP from another register and mark VSP for reload.  */
	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
	  vsp_valid = 0;
	}
      /* 1010xnnn: pop r4..r[4+nnn], plus LR if bit 3 is set.  */
      else if ((insn & 0xf0) == 0xa0)
	{
	  int count = insn & 0x7;
	  int pop_lr = (insn & 0x8) != 0;
	  int i;

	  /* Pop r4..r[4+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[4 + i].addr = vsp;
	      vsp += 4;
	    }

	  /* If indicated by flag, pop LR as well.  */
	  if (pop_lr)
	    {
	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
	      vsp += 4;
	    }
	}
      /* 10110000: "Finish" -- end of the instruction list.  */
      else if (insn == 0xb0)
	{
	  /* We could only have updated PC by popping into it; if so, it
	     will show up as address.  Otherwise, copy LR into PC.  */
	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
	    cache->saved_regs[ARM_PC_REGNUM]
	      = cache->saved_regs[ARM_LR_REGNUM];

	  /* We're done.  */
	  break;
	}
      /* 10110001 0000iiii: pop r0..r3 under 4-bit mask.  */
      else if (insn == 0xb1)
	{
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop r0..r3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[i].addr = vsp;
		vsp += 4;
	      }
	}
      /* 10110010 uleb128: vsp = vsp + 0x204 + (uleb128 << 2).  */
      else if (insn == 0xb2)
	{
	  ULONGEST offset = 0;
	  unsigned shift = 0;

	  /* Decode the ULEB128-encoded offset operand.  */
	  do
	    {
	      offset |= (*entry & 0x7f) << shift;
	      shift += 7;
	    }
	  while (*entry++ & 0x80);

	  vsp += 0x204 + (offset << 2);
	}
      /* 10110011 sssscccc: pop VFP D[ssss]..D[ssss+cccc] (FSTMFDX).  */
      else if (insn == 0xb3)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D15 are valid here.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      /* 10111nnn: pop VFP D[8]..D[8+nnn] (FSTMFDX).  */
      else if ((insn & 0xf8) == 0xb8)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      /* 11000110 sssscccc: pop iwmmxt WR[ssss]..WR[ssss+cccc].  */
      else if (insn == 0xc6)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers WR0..WR15 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      /* 11000111 0000iiii: pop iwmmxt WCGR0..WCGR3 under mask.  */
      else if (insn == 0xc7)
	{
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
		vsp += 4;
	      }
	}
      /* 11000nnn: pop iwmmxt WR[10]..WR[10+nnn].  */
      else if ((insn & 0xf8) == 0xc0)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop iwmmx registers WR[10]..WR[10+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      /* 11001000 sssscccc: pop VFP D[16+ssss]..D[16+ssss+cccc].  */
      else if (insn == 0xc8)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D31 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers
	     D[16+start]..D[16+start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      /* 11001001 sssscccc: pop VFP D[ssss]..D[ssss+cccc] (VPUSH).  */
      else if (insn == 0xc9)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      /* 11010nnn: pop VFP D[8]..D[8+nnn] (VPUSH).  */
      else if ((insn & 0xf8) == 0xd0)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else
	{
	  /* Everything else is "spare".  */
	  return NULL;
	}
    }

  /* If we restore SP from a register, assume this was the frame register.
     Otherwise just fall back to SP as frame register.  */
  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
    cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
  else
    cache->framereg = ARM_SP_REGNUM;

  /* Determine offset to previous frame.  */
  cache->framesize
    = vsp - get_frame_register_unsigned (this_frame, cache->framereg);

  /* We already got the previous SP.  */
  cache->prev_sp = vsp;

  return cache;
}
2778
2779 /* Unwinding via ARM exception table entries. Note that the sniffer
2780 already computes a filled-in prologue cache, which is then used
2781 with the same arm_prologue_this_id and arm_prologue_prev_register
2782 routines also used for prologue-parsing based unwinding. */
2783
static int
arm_exidx_unwind_sniffer (const struct frame_unwind *self,
			  struct frame_info *this_frame,
			  void **this_prologue_cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR addr_in_block, exidx_region, func_start;
  struct arm_prologue_cache *cache;
  gdb_byte *entry;

  /* See if we have an ARM exception table entry covering this address.  */
  addr_in_block = get_frame_address_in_block (this_frame);
  entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
  if (!entry)
    return 0;

  /* The ARM exception table does not describe unwind information
     for arbitrary PC values, but is guaranteed to be correct only
     at call sites.  We have to decide here whether we want to use
     ARM exception table information for this frame, or fall back
     to using prologue parsing.  (Note that if we have DWARF CFI,
     this sniffer isn't even called -- CFI is always preferred.)

     Before we make this decision, however, we check whether we
     actually have *symbol* information for the current frame.
     If not, prologue parsing would not work anyway, so we might
     as well use the exception table and hope for the best.  */
  if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
    {
      int exc_valid = 0;

      /* If the next frame is "normal", we are at a call site in this
	 frame, so exception information is guaranteed to be valid.  */
      if (get_next_frame (this_frame)
	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
	exc_valid = 1;

      /* We also assume exception information is valid if we're currently
	 blocked in a system call.  The system library is supposed to
	 ensure this, so that e.g. pthread cancellation works.
	 In each case this is detected by reading the instruction
	 immediately preceding the current PC.  */
      if (arm_frame_is_thumb (this_frame))
	{
	  LONGEST insn;

	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
					byte_order_for_code, &insn)
	      && (insn & 0xff00) == 0xdf00 /* svc */)
	    exc_valid = 1;
	}
      else
	{
	  LONGEST insn;

	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
					byte_order_for_code, &insn)
	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
	    exc_valid = 1;
	}

      /* Bail out if we don't know that exception information is valid.  */
      if (!exc_valid)
	return 0;

      /* The ARM exception index does not mark the *end* of the region
	 covered by the entry, and some functions will not have any entry.
	 To correctly recognize the end of the covered region, the linker
	 should have inserted dummy records with a CANTUNWIND marker.

	 Unfortunately, current versions of GNU ld do not reliably do
	 this, and thus we may have found an incorrect entry above.
	 As a (temporary) sanity check, we only use the entry if it
	 lies *within* the bounds of the function.  Note that this check
	 might reject perfectly valid entries that just happen to cover
	 multiple functions; therefore this check ought to be removed
	 once the linker is fixed.  */
      if (func_start > exidx_region)
	return 0;
    }

  /* Decode the list of unwinding instructions into a prologue cache.
     Note that this may fail due to e.g. a "refuse to unwind" code.  */
  cache = arm_exidx_fill_cache (this_frame, entry);
  if (!cache)
    return 0;

  *this_prologue_cache = cache;
  return 1;
}
2873
/* Exception-table-based unwinder.  The sniffer fills in the prologue
   cache itself, after which the standard arm_prologue_this_id and
   arm_prologue_prev_register hooks apply unchanged.  */

struct frame_unwind arm_exidx_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_exidx_unwind_sniffer
};
2882
2883 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2884 trampoline, return the target PC. Otherwise return 0.
2885
2886 void call0a (char c, short s, int i, long l) {}
2887
2888 int main (void)
2889 {
2890 (*pointer_to_call0a) (c, s, i, l);
2891 }
2892
2893 Instead of calling a stub library function _call_via_xx (xx is
2894 the register name), GCC may inline the trampoline in the object
2895 file as below (register r2 has the address of call0a).
2896
2897 .global main
2898 .type main, %function
2899 ...
2900 bl .L1
2901 ...
2902 .size main, .-main
2903
2904 .L1:
2905 bx r2
2906
2907 The trampoline 'bx r2' doesn't belong to main. */
2908
2909 static CORE_ADDR
2910 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2911 {
2912 /* The heuristics of recognizing such trampoline is that FRAME is
2913 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2914 if (arm_frame_is_thumb (frame))
2915 {
2916 gdb_byte buf[2];
2917
2918 if (target_read_memory (pc, buf, 2) == 0)
2919 {
2920 struct gdbarch *gdbarch = get_frame_arch (frame);
2921 enum bfd_endian byte_order_for_code
2922 = gdbarch_byte_order_for_code (gdbarch);
2923 uint16_t insn
2924 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2925
2926 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2927 {
2928 CORE_ADDR dest
2929 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2930
2931 /* Clear the LSB so that gdb core sets step-resume
2932 breakpoint at the right address. */
2933 return UNMAKE_THUMB_ADDR (dest);
2934 }
2935 }
2936 }
2937
2938 return 0;
2939 }
2940
2941 static struct arm_prologue_cache *
2942 arm_make_stub_cache (struct frame_info *this_frame)
2943 {
2944 struct arm_prologue_cache *cache;
2945
2946 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2947 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2948
2949 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2950
2951 return cache;
2952 }
2953
2954 /* Our frame ID for a stub frame is the current SP and LR. */
2955
2956 static void
2957 arm_stub_this_id (struct frame_info *this_frame,
2958 void **this_cache,
2959 struct frame_id *this_id)
2960 {
2961 struct arm_prologue_cache *cache;
2962
2963 if (*this_cache == NULL)
2964 *this_cache = arm_make_stub_cache (this_frame);
2965 cache = (struct arm_prologue_cache *) *this_cache;
2966
2967 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2968 }
2969
2970 static int
2971 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2972 struct frame_info *this_frame,
2973 void **this_prologue_cache)
2974 {
2975 CORE_ADDR addr_in_block;
2976 gdb_byte dummy[4];
2977 CORE_ADDR pc, start_addr;
2978 const char *name;
2979
2980 addr_in_block = get_frame_address_in_block (this_frame);
2981 pc = get_frame_pc (this_frame);
2982 if (in_plt_section (addr_in_block)
2983 /* We also use the stub winder if the target memory is unreadable
2984 to avoid having the prologue unwinder trying to read it. */
2985 || target_read_memory (pc, dummy, 4) != 0)
2986 return 1;
2987
2988 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2989 && arm_skip_bx_reg (this_frame, pc) != 0)
2990 return 1;
2991
2992 return 0;
2993 }
2994
/* Unwinder for PLT stubs and unreadable code, accepted by
   arm_stub_unwind_sniffer.  arm_prologue_prev_register is reused;
   arm_make_stub_cache records no saved registers, so only SP/PC/PS
   reconstruction applies.  */

struct frame_unwind arm_stub_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_stub_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_stub_unwind_sniffer
};
3003
3004 /* Put here the code to store, into CACHE->saved_regs, the addresses
3005 of the saved registers of frame described by THIS_FRAME. CACHE is
3006 returned. */
3007
3008 static struct arm_prologue_cache *
3009 arm_m_exception_cache (struct frame_info *this_frame)
3010 {
3011 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3012 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3013 struct arm_prologue_cache *cache;
3014 CORE_ADDR unwound_sp;
3015 LONGEST xpsr;
3016
3017 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3018 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3019
3020 unwound_sp = get_frame_register_unsigned (this_frame,
3021 ARM_SP_REGNUM);
3022
3023 /* The hardware saves eight 32-bit words, comprising xPSR,
3024 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3025 "B1.5.6 Exception entry behavior" in
3026 "ARMv7-M Architecture Reference Manual". */
3027 cache->saved_regs[0].addr = unwound_sp;
3028 cache->saved_regs[1].addr = unwound_sp + 4;
3029 cache->saved_regs[2].addr = unwound_sp + 8;
3030 cache->saved_regs[3].addr = unwound_sp + 12;
3031 cache->saved_regs[12].addr = unwound_sp + 16;
3032 cache->saved_regs[14].addr = unwound_sp + 20;
3033 cache->saved_regs[15].addr = unwound_sp + 24;
3034 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3035
3036 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3037 aligner between the top of the 32-byte stack frame and the
3038 previous context's stack pointer. */
3039 cache->prev_sp = unwound_sp + 32;
3040 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3041 && (xpsr & (1 << 9)) != 0)
3042 cache->prev_sp += 4;
3043
3044 return cache;
3045 }
3046
3047 /* Implementation of function hook 'this_id' in
3048 'struct frame_uwnind'. */
3049
3050 static void
3051 arm_m_exception_this_id (struct frame_info *this_frame,
3052 void **this_cache,
3053 struct frame_id *this_id)
3054 {
3055 struct arm_prologue_cache *cache;
3056
3057 if (*this_cache == NULL)
3058 *this_cache = arm_m_exception_cache (this_frame);
3059 cache = (struct arm_prologue_cache *) *this_cache;
3060
3061 /* Our frame ID for a stub frame is the current SP and LR. */
3062 *this_id = frame_id_build (cache->prev_sp,
3063 get_frame_pc (this_frame));
3064 }
3065
3066 /* Implementation of function hook 'prev_register' in
3067 'struct frame_uwnind'. */
3068
3069 static struct value *
3070 arm_m_exception_prev_register (struct frame_info *this_frame,
3071 void **this_cache,
3072 int prev_regnum)
3073 {
3074 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3075 struct arm_prologue_cache *cache;
3076
3077 if (*this_cache == NULL)
3078 *this_cache = arm_m_exception_cache (this_frame);
3079 cache = (struct arm_prologue_cache *) *this_cache;
3080
3081 /* The value was already reconstructed into PREV_SP. */
3082 if (prev_regnum == ARM_SP_REGNUM)
3083 return frame_unwind_got_constant (this_frame, prev_regnum,
3084 cache->prev_sp);
3085
3086 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3087 prev_regnum);
3088 }
3089
3090 /* Implementation of function hook 'sniffer' in
3091 'struct frame_uwnind'. */
3092
3093 static int
3094 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3095 struct frame_info *this_frame,
3096 void **this_prologue_cache)
3097 {
3098 CORE_ADDR this_pc = get_frame_pc (this_frame);
3099
3100 /* No need to check is_m; this sniffer is only registered for
3101 M-profile architectures. */
3102
3103 /* Exception frames return to one of these magic PCs. Other values
3104 are not defined as of v7-M. See details in "B1.5.8 Exception
3105 return behavior" in "ARMv7-M Architecture Reference Manual". */
3106 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3107 || this_pc == 0xfffffffd)
3108 return 1;
3109
3110 return 0;
3111 }
3112
/* Frame unwinder for M-profile exceptions.  Classified as a signal
   trampoline frame since it is pushed by hardware, not by code GDB
   analyzed.  */

struct frame_unwind arm_m_exception_unwind =
{
  SIGTRAMP_FRAME,
  default_frame_unwind_stop_reason,
  arm_m_exception_this_id,		/* 'this_id' hook.  */
  arm_m_exception_prev_register,	/* 'prev_register' hook.  */
  NULL,
  arm_m_exception_unwind_sniffer	/* 'sniffer' hook.  */
};
3124
3125 static CORE_ADDR
3126 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3127 {
3128 struct arm_prologue_cache *cache;
3129
3130 if (*this_cache == NULL)
3131 *this_cache = arm_make_prologue_cache (this_frame);
3132 cache = (struct arm_prologue_cache *) *this_cache;
3133
3134 return cache->prev_sp - cache->framesize;
3135 }
3136
/* Frame base table for prologue-analyzed frames; the same handler is
   used for all three base-address hooks.  */

struct frame_base arm_normal_base = {
  &arm_prologue_unwind,
  arm_normal_frame_base,
  arm_normal_frame_base,
  arm_normal_frame_base
};
3143
3144 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3145 dummy frame. The frame ID's base needs to match the TOS value
3146 saved by save_dummy_frame_tos() and returned from
3147 arm_push_dummy_call, and the PC needs to match the dummy frame's
3148 breakpoint. */
3149
3150 static struct frame_id
3151 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3152 {
3153 return frame_id_build (get_frame_register_unsigned (this_frame,
3154 ARM_SP_REGNUM),
3155 get_frame_pc (this_frame));
3156 }
3157
3158 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3159 be used to construct the previous frame's ID, after looking up the
3160 containing function). */
3161
3162 static CORE_ADDR
3163 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3164 {
3165 CORE_ADDR pc;
3166 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3167 return arm_addr_bits_remove (gdbarch, pc);
3168 }
3169
3170 static CORE_ADDR
3171 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3172 {
3173 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3174 }
3175
3176 static struct value *
3177 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3178 int regnum)
3179 {
3180 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3181 CORE_ADDR lr, cpsr;
3182 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3183
3184 switch (regnum)
3185 {
3186 case ARM_PC_REGNUM:
3187 /* The PC is normally copied from the return column, which
3188 describes saves of LR. However, that version may have an
3189 extra bit set to indicate Thumb state. The bit is not
3190 part of the PC. */
3191 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3192 return frame_unwind_got_constant (this_frame, regnum,
3193 arm_addr_bits_remove (gdbarch, lr));
3194
3195 case ARM_PS_REGNUM:
3196 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3197 cpsr = get_frame_register_unsigned (this_frame, regnum);
3198 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3199 if (IS_THUMB_ADDR (lr))
3200 cpsr |= t_bit;
3201 else
3202 cpsr &= ~t_bit;
3203 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3204
3205 default:
3206 internal_error (__FILE__, __LINE__,
3207 _("Unexpected register %d"), regnum);
3208 }
3209 }
3210
3211 static void
3212 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3213 struct dwarf2_frame_state_reg *reg,
3214 struct frame_info *this_frame)
3215 {
3216 switch (regnum)
3217 {
3218 case ARM_PC_REGNUM:
3219 case ARM_PS_REGNUM:
3220 reg->how = DWARF2_FRAME_REG_FN;
3221 reg->loc.fn = arm_dwarf2_prev_register;
3222 break;
3223 case ARM_SP_REGNUM:
3224 reg->how = DWARF2_FRAME_REG_CFA;
3225 break;
3226 }
3227 }
3228
/* Implement the stack_frame_destroyed_p gdbarch method, for Thumb
   code.  Return non-zero if PC appears to be inside the function
   epilogue, i.e. the frame has already been (partially) torn down.  */

static int
thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn, insn2;
  int found_return = 0, found_stack_adjust = 0;
  CORE_ADDR func_start, func_end;
  CORE_ADDR scan_pc;
  gdb_byte buf[4];

  /* Without function bounds we cannot scan; assume not in epilogue.  */
  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* The epilogue is a sequence of instructions along the following lines:

    - add stack frame size to SP or FP
    - [if frame pointer used] restore SP from FP
    - restore registers from SP [may include PC]
    - a return-type instruction [if PC wasn't already restored]

    In a first pass, we scan forward from the current PC and verify the
    instructions we find as compatible with this sequence, ending in a
    return instruction.

    However, this is not sufficient to distinguish indirect function calls
    within a function from indirect tail calls in the epilogue in some cases.
    Therefore, if we didn't already find any SP-changing instruction during
    forward scan, we add a backward scanning heuristic to ensure we actually
    are in the epilogue.  */

  scan_pc = pc;
  while (scan_pc < func_end && !found_return)
    {
      /* Stop scanning if the instruction stream cannot be read.  */
      if (target_read_memory (scan_pc, buf, 2))
	break;

      scan_pc += 2;
      insn = extract_unsigned_integer (buf, 2, byte_order_for_code);

      if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
	found_return = 1;
      else if (insn == 0x46f7)  /* mov pc, lr */
	found_return = 1;
      else if (thumb_instruction_restores_sp (insn))
	{
	  /* SP-restoring instructions are expected in an epilogue; a
	     pop that includes PC is itself the return.  */
	  if ((insn & 0xff00) == 0xbd00)  /* pop <registers, PC> */
	    found_return = 1;
	}
      else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
	{
	  /* Fetch the second halfword of the 32-bit instruction.  */
	  if (target_read_memory (scan_pc, buf, 2))
	    break;

	  scan_pc += 2;
	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);

	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
	    {
	      if (insn2 & 0x8000)  /* <registers> include PC.  */
		found_return = 1;
	    }
	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
		   && (insn2 & 0x0fff) == 0x0b04)
	    {
	      if ((insn2 & 0xf000) == 0xf000)  /* <Rt> is PC.  */
		found_return = 1;
	    }
	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
		   && (insn2 & 0x0e00) == 0x0a00)
	    ;  /* VFP register restore: allowed in an epilogue, keep going.  */
	  else
	    break;  /* Any other 32-bit insn means we are not in an epilogue.  */
	}
      else
	break;  /* Any other 16-bit insn means we are not in an epilogue.  */
    }

  if (!found_return)
    return 0;

  /* Since any instruction in the epilogue sequence, with the possible
     exception of return itself, updates the stack pointer, we need to
     scan backwards for at most one instruction.  Try either a 16-bit or
     a 32-bit instruction.  This is just a heuristic, so we do not worry
     too much about false positives.  */

  if (pc - 4 < func_start)
    return 0;
  if (target_read_memory (pc - 4, buf, 4))
    return 0;

  /* INSN is the first halfword of a possible 32-bit instruction ending
     at PC; INSN2 is the 16-bit instruction immediately before PC.  */
  insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
  insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);

  if (thumb_instruction_restores_sp (insn2))
    found_stack_adjust = 1;
  else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
    found_stack_adjust = 1;
  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
	   && (insn2 & 0x0fff) == 0x0b04)
    found_stack_adjust = 1;
  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
	   && (insn2 & 0x0e00) == 0x0a00)
    found_stack_adjust = 1;

  return found_stack_adjust;
}
3338
/* Implement the stack_frame_destroyed_p gdbarch method.  Return
   non-zero if PC appears to be inside the function epilogue.  */

static int
arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn;
  int found_return;
  CORE_ADDR func_start, func_end;

  /* Thumb code gets its own, more involved scanner.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_stack_frame_destroyed_p (gdbarch, pc);

  /* Without function bounds we cannot scan; assume not in epilogue.  */
  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* We are in the epilogue if the previous instruction was a stack
     adjustment and the next instruction is a possible return (bx, mov
     pc, or pop).  We could have to scan backwards to find the stack
     adjustment, or forwards to find the return, but this is a decent
     approximation.  First scan forwards.  */

  found_return = 0;
  insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
  /* Skip unconditional (NV-coded) instructions.  */
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0ffffff0) == 0x012fff10)
	/* BX.  */
	found_return = 1;
      else if ((insn & 0x0ffffff0) == 0x01a0f000)
	/* MOV PC.  */
	found_return = 1;
      else if ((insn & 0x0fff0000) == 0x08bd0000
	       && (insn & 0x0000c000) != 0)
	/* POP (LDMIA), including PC or LR.  */
	found_return = 1;
    }

  if (!found_return)
    return 0;

  /* Scan backwards.  This is just a heuristic, so do not worry about
     false positives from mode changes.  */

  if (pc < func_start + 4)
    return 0;

  insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
  if (arm_instruction_restores_sp (insn))
    return 1;

  return 0;
}
3392
3393
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.  */

struct stack_item
{
  int len;			/* Size of DATA in bytes.  */
  struct stack_item *prev;	/* Next item deeper in the stack.  */
  void *data;			/* Heap-allocated copy of the item's bytes.  */
};
3403
3404 static struct stack_item *
3405 push_stack_item (struct stack_item *prev, const void *contents, int len)
3406 {
3407 struct stack_item *si;
3408 si = XNEW (struct stack_item);
3409 si->data = xmalloc (len);
3410 si->len = len;
3411 si->prev = prev;
3412 memcpy (si->data, contents, len);
3413 return si;
3414 }
3415
3416 static struct stack_item *
3417 pop_stack_item (struct stack_item *si)
3418 {
3419 struct stack_item *dead = si;
3420 si = si->prev;
3421 xfree (dead->data);
3422 xfree (dead);
3423 return si;
3424 }
3425
3426
3427 /* Return the alignment (in bytes) of the given type. */
3428
3429 static int
3430 arm_type_align (struct type *t)
3431 {
3432 int n;
3433 int align;
3434 int falign;
3435
3436 t = check_typedef (t);
3437 switch (TYPE_CODE (t))
3438 {
3439 default:
3440 /* Should never happen. */
3441 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3442 return 4;
3443
3444 case TYPE_CODE_PTR:
3445 case TYPE_CODE_ENUM:
3446 case TYPE_CODE_INT:
3447 case TYPE_CODE_FLT:
3448 case TYPE_CODE_SET:
3449 case TYPE_CODE_RANGE:
3450 case TYPE_CODE_REF:
3451 case TYPE_CODE_CHAR:
3452 case TYPE_CODE_BOOL:
3453 return TYPE_LENGTH (t);
3454
3455 case TYPE_CODE_ARRAY:
3456 case TYPE_CODE_COMPLEX:
3457 /* TODO: What about vector types? */
3458 return arm_type_align (TYPE_TARGET_TYPE (t));
3459
3460 case TYPE_CODE_STRUCT:
3461 case TYPE_CODE_UNION:
3462 align = 1;
3463 for (n = 0; n < TYPE_NFIELDS (t); n++)
3464 {
3465 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3466 if (falign > align)
3467 align = falign;
3468 }
3469 return align;
3470 }
3471 }
3472
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,	/* No classification determined yet.  */
  VFP_CPRC_SINGLE,	/* 4-byte single-precision float.  */
  VFP_CPRC_DOUBLE,	/* 8-byte double-precision float.  */
  VFP_CPRC_VEC64,	/* 8-byte vector.  */
  VFP_CPRC_VEC128	/* 16-byte vector.  */
};
3484
3485 /* The length of one element of base type B. */
3486
3487 static unsigned
3488 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3489 {
3490 switch (b)
3491 {
3492 case VFP_CPRC_SINGLE:
3493 return 4;
3494 case VFP_CPRC_DOUBLE:
3495 return 8;
3496 case VFP_CPRC_VEC64:
3497 return 8;
3498 case VFP_CPRC_VEC128:
3499 return 16;
3500 default:
3501 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3502 (int) b);
3503 }
3504 }
3505
3506 /* The character ('s', 'd' or 'q') for the type of VFP register used
3507 for passing base type B. */
3508
3509 static int
3510 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3511 {
3512 switch (b)
3513 {
3514 case VFP_CPRC_SINGLE:
3515 return 's';
3516 case VFP_CPRC_DOUBLE:
3517 return 'd';
3518 case VFP_CPRC_VEC64:
3519 return 'd';
3520 case VFP_CPRC_VEC128:
3521 return 'q';
3522 default:
3523 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3524 (int) b);
3525 }
3526 }
3527
/* Determine whether T may be part of a candidate for passing and
   returning in VFP registers, ignoring the limit on the total number
   of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
   classification of the first valid component found; if it is not
   VFP_CPRC_UNKNOWN, all components must have the same classification
   as *BASE_TYPE.  If it is found that T contains a type not permitted
   for passing and returning in VFP registers, a type differently
   classified from *BASE_TYPE, or two types differently classified
   from each other, return -1, otherwise return the total number of
   base-type elements found (possibly 0 in an empty structure or
   array).  Vector types are not currently supported, matching the
   generic AAPCS support.  */

static int
arm_vfp_cprc_sub_candidate (struct type *t,
			    enum arm_vfp_cprc_base_type *base_type)
{
  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    case TYPE_CODE_FLT:
      /* A bare float classifies by its size: 4 -> single, 8 -> double;
	 anything else (e.g. extended precision) disqualifies.  */
      switch (TYPE_LENGTH (t))
	{
	case 4:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 1;

	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 1;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_COMPLEX:
      /* Arguments of complex T where T is one of the types float or
	 double get treated as if they are implemented as:

	 struct complexT
	 {
	   T real;
	   T imag;
	 };

      */
      switch (TYPE_LENGTH (t))
	{
	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 2;

	case 16:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 2;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_ARRAY:
      {
	/* An array contributes its total size divided by the element
	   unit length, once the element type itself qualifies.  */
	int count;
	unsigned unitlen;
	count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
	if (count == -1)
	  return -1;
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
	return TYPE_LENGTH (t) / unitlen;
      }
      break;

    case TYPE_CODE_STRUCT:
      {
	/* Struct fields contribute cumulatively; the struct's size must
	   exactly equal the sum (no padding allowed).  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count += sub_count;
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    case TYPE_CODE_UNION:
      {
	/* A union contributes its largest member's element count; the
	   union's size must exactly equal that count's total size.  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count = (count > sub_count ? count : sub_count);
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    default:
      break;
    }

  return -1;
}
3680
3681 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3682 if passed to or returned from a non-variadic function with the VFP
3683 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3684 *BASE_TYPE to the base type for T and *COUNT to the number of
3685 elements of that base type before returning. */
3686
3687 static int
3688 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3689 int *count)
3690 {
3691 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3692 int c = arm_vfp_cprc_sub_candidate (t, &b);
3693 if (c <= 0 || c > 4)
3694 return 0;
3695 *base_type = b;
3696 *count = c;
3697 return 1;
3698 }
3699
3700 /* Return 1 if the VFP ABI should be used for passing arguments to and
3701 returning values from a function of type FUNC_TYPE, 0
3702 otherwise. */
3703
3704 static int
3705 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3706 {
3707 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3708 /* Variadic functions always use the base ABI. Assume that functions
3709 without debug info are not variadic. */
3710 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3711 return 0;
3712 /* The VFP ABI is only supported as a variant of AAPCS. */
3713 if (tdep->arm_abi != ARM_ABI_AAPCS)
3714 return 0;
3715 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3716 }
3717
3718 /* We currently only support passing parameters in integer registers, which
3719 conforms with GCC's default model, and VFP argument passing following
3720 the VFP variant of AAPCS. Several other variants exist and
3721 we should probably support some of them based on the selected ABI. */
3722
3723 static CORE_ADDR
3724 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3725 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3726 struct value **args, CORE_ADDR sp, int struct_return,
3727 CORE_ADDR struct_addr)
3728 {
3729 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3730 int argnum;
3731 int argreg;
3732 int nstack;
3733 struct stack_item *si = NULL;
3734 int use_vfp_abi;
3735 struct type *ftype;
3736 unsigned vfp_regs_free = (1 << 16) - 1;
3737
3738 /* Determine the type of this function and whether the VFP ABI
3739 applies. */
3740 ftype = check_typedef (value_type (function));
3741 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3742 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3743 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3744
3745 /* Set the return address. For the ARM, the return breakpoint is
3746 always at BP_ADDR. */
3747 if (arm_pc_is_thumb (gdbarch, bp_addr))
3748 bp_addr |= 1;
3749 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3750
3751 /* Walk through the list of args and determine how large a temporary
3752 stack is required. Need to take care here as structs may be
3753 passed on the stack, and we have to push them. */
3754 nstack = 0;
3755
3756 argreg = ARM_A1_REGNUM;
3757 nstack = 0;
3758
3759 /* The struct_return pointer occupies the first parameter
3760 passing register. */
3761 if (struct_return)
3762 {
3763 if (arm_debug)
3764 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3765 gdbarch_register_name (gdbarch, argreg),
3766 paddress (gdbarch, struct_addr));
3767 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3768 argreg++;
3769 }
3770
3771 for (argnum = 0; argnum < nargs; argnum++)
3772 {
3773 int len;
3774 struct type *arg_type;
3775 struct type *target_type;
3776 enum type_code typecode;
3777 const bfd_byte *val;
3778 int align;
3779 enum arm_vfp_cprc_base_type vfp_base_type;
3780 int vfp_base_count;
3781 int may_use_core_reg = 1;
3782
3783 arg_type = check_typedef (value_type (args[argnum]));
3784 len = TYPE_LENGTH (arg_type);
3785 target_type = TYPE_TARGET_TYPE (arg_type);
3786 typecode = TYPE_CODE (arg_type);
3787 val = value_contents (args[argnum]);
3788
3789 align = arm_type_align (arg_type);
3790 /* Round alignment up to a whole number of words. */
3791 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3792 /* Different ABIs have different maximum alignments. */
3793 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3794 {
3795 /* The APCS ABI only requires word alignment. */
3796 align = INT_REGISTER_SIZE;
3797 }
3798 else
3799 {
3800 /* The AAPCS requires at most doubleword alignment. */
3801 if (align > INT_REGISTER_SIZE * 2)
3802 align = INT_REGISTER_SIZE * 2;
3803 }
3804
3805 if (use_vfp_abi
3806 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3807 &vfp_base_count))
3808 {
3809 int regno;
3810 int unit_length;
3811 int shift;
3812 unsigned mask;
3813
3814 /* Because this is a CPRC it cannot go in a core register or
3815 cause a core register to be skipped for alignment.
3816 Either it goes in VFP registers and the rest of this loop
3817 iteration is skipped for this argument, or it goes on the
3818 stack (and the stack alignment code is correct for this
3819 case). */
3820 may_use_core_reg = 0;
3821
3822 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3823 shift = unit_length / 4;
3824 mask = (1 << (shift * vfp_base_count)) - 1;
3825 for (regno = 0; regno < 16; regno += shift)
3826 if (((vfp_regs_free >> regno) & mask) == mask)
3827 break;
3828
3829 if (regno < 16)
3830 {
3831 int reg_char;
3832 int reg_scaled;
3833 int i;
3834
3835 vfp_regs_free &= ~(mask << regno);
3836 reg_scaled = regno / shift;
3837 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3838 for (i = 0; i < vfp_base_count; i++)
3839 {
3840 char name_buf[4];
3841 int regnum;
3842 if (reg_char == 'q')
3843 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3844 val + i * unit_length);
3845 else
3846 {
3847 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3848 reg_char, reg_scaled + i);
3849 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3850 strlen (name_buf));
3851 regcache_cooked_write (regcache, regnum,
3852 val + i * unit_length);
3853 }
3854 }
3855 continue;
3856 }
3857 else
3858 {
3859 /* This CPRC could not go in VFP registers, so all VFP
3860 registers are now marked as used. */
3861 vfp_regs_free = 0;
3862 }
3863 }
3864
3865 /* Push stack padding for dowubleword alignment. */
3866 if (nstack & (align - 1))
3867 {
3868 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3869 nstack += INT_REGISTER_SIZE;
3870 }
3871
3872 /* Doubleword aligned quantities must go in even register pairs. */
3873 if (may_use_core_reg
3874 && argreg <= ARM_LAST_ARG_REGNUM
3875 && align > INT_REGISTER_SIZE
3876 && argreg & 1)
3877 argreg++;
3878
3879 /* If the argument is a pointer to a function, and it is a
3880 Thumb function, create a LOCAL copy of the value and set
3881 the THUMB bit in it. */
3882 if (TYPE_CODE_PTR == typecode
3883 && target_type != NULL
3884 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3885 {
3886 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3887 if (arm_pc_is_thumb (gdbarch, regval))
3888 {
3889 bfd_byte *copy = (bfd_byte *) alloca (len);
3890 store_unsigned_integer (copy, len, byte_order,
3891 MAKE_THUMB_ADDR (regval));
3892 val = copy;
3893 }
3894 }
3895
3896 /* Copy the argument to general registers or the stack in
3897 register-sized pieces. Large arguments are split between
3898 registers and stack. */
3899 while (len > 0)
3900 {
3901 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3902
3903 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3904 {
3905 /* The argument is being passed in a general purpose
3906 register. */
3907 CORE_ADDR regval
3908 = extract_unsigned_integer (val, partial_len, byte_order);
3909 if (byte_order == BFD_ENDIAN_BIG)
3910 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3911 if (arm_debug)
3912 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3913 argnum,
3914 gdbarch_register_name
3915 (gdbarch, argreg),
3916 phex (regval, INT_REGISTER_SIZE));
3917 regcache_cooked_write_unsigned (regcache, argreg, regval);
3918 argreg++;
3919 }
3920 else
3921 {
3922 /* Push the arguments onto the stack. */
3923 if (arm_debug)
3924 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3925 argnum, nstack);
3926 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3927 nstack += INT_REGISTER_SIZE;
3928 }
3929
3930 len -= partial_len;
3931 val += partial_len;
3932 }
3933 }
3934 /* If we have an odd number of words to push, then decrement the stack
3935 by one word now, so first stack argument will be dword aligned. */
3936 if (nstack & 4)
3937 sp -= 4;
3938
3939 while (si)
3940 {
3941 sp -= si->len;
3942 write_memory (sp, si->data, si->len);
3943 si = pop_stack_item (si);
3944 }
3945
3946 /* Finally, update teh SP register. */
3947 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3948
3949 return sp;
3950 }
3951
3952
3953 /* Always align the frame to an 8-byte boundary. This is required on
3954 some platforms and harmless on the rest. */
3955
3956 static CORE_ADDR
3957 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3958 {
3959 /* Align the stack to eight bytes. */
3960 return sp & ~ (CORE_ADDR) 7;
3961 }
3962
/* Print the names of the FPS exception flag bits set in FLAGS (low
   five bits), followed by a newline.  */

static void
print_fpu_flags (struct ui_file *file, int flags)
{
  static const char *const flag_names[]
    = { "IVO ", "DVZ ", "OFL ", "UFL ", "INX " };
  int bit;

  for (bit = 0; bit < 5; bit++)
    if (flags & (1 << bit))
      fputs_filtered (flag_names[bit], file);
  fputc_filtered ('\n', file);
}
3978
3979 /* Print interesting information about the floating point processor
3980 (if present) or emulator. */
3981 static void
3982 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3983 struct frame_info *frame, const char *args)
3984 {
3985 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3986 int type;
3987
3988 type = (status >> 24) & 127;
3989 if (status & (1 << 31))
3990 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3991 else
3992 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3993 /* i18n: [floating point unit] mask */
3994 fputs_filtered (_("mask: "), file);
3995 print_fpu_flags (file, status >> 16);
3996 /* i18n: [floating point unit] flags */
3997 fputs_filtered (_("flags: "), file);
3998 print_fpu_flags (file, status);
3999 }
4000
4001 /* Construct the ARM extended floating point type. */
4002 static struct type *
4003 arm_ext_type (struct gdbarch *gdbarch)
4004 {
4005 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4006
4007 if (!tdep->arm_ext_type)
4008 tdep->arm_ext_type
4009 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4010 floatformats_arm_ext);
4011
4012 return tdep->arm_ext_type;
4013 }
4014
/* Build (lazily, caching in the tdep) a union type presenting the
   contents of a NEON D register under each of its vector and scalar
   views.  */

static struct type *
arm_neon_double_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_double_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
			       TYPE_CODE_UNION);
      /* Unsigned-integer views of the register.  */
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", elem);
      /* Floating-point views.  */
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", elem);

      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_d";
      tdep->neon_double_type = t;
    }

  return tdep->neon_double_type;
}
4046
/* FIXME: The vector types are not correctly ordered on big-endian
   targets.  Just as s0 is the low bits of d0, d0[0] is also the low
   bits of d0 - regardless of what unit size is being held in d0.  So
   the offset of the first uint8 in d0 is 7, but the offset of the
   first float is 4.  This code works as-is for little-endian
   targets.  */

/* Build (lazily, caching in the tdep) a union type presenting the
   contents of a NEON Q register under each of its vector views.  */

static struct type *
arm_neon_quad_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_quad_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
			       TYPE_CODE_UNION);
      /* Unsigned-integer views of the register.  */
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 16));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", init_vector_type (elem, 2));
      /* Floating-point views.  */
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", init_vector_type (elem, 2));

      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_q";
      tdep->neon_quad_type = t;
    }

  return tdep->neon_quad_type;
}
4085
4086 /* Return the GDB type object for the "standard" data type of data in
4087 register N. */
4088
4089 static struct type *
4090 arm_register_type (struct gdbarch *gdbarch, int regnum)
4091 {
4092 int num_regs = gdbarch_num_regs (gdbarch);
4093
4094 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4095 && regnum >= num_regs && regnum < num_regs + 32)
4096 return builtin_type (gdbarch)->builtin_float;
4097
4098 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4099 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4100 return arm_neon_quad_type (gdbarch);
4101
4102 /* If the target description has register information, we are only
4103 in this function so that we can override the types of
4104 double-precision registers for NEON. */
4105 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4106 {
4107 struct type *t = tdesc_register_type (gdbarch, regnum);
4108
4109 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4110 && TYPE_CODE (t) == TYPE_CODE_FLT
4111 && gdbarch_tdep (gdbarch)->have_neon)
4112 return arm_neon_double_type (gdbarch);
4113 else
4114 return t;
4115 }
4116
4117 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4118 {
4119 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4120 return builtin_type (gdbarch)->builtin_void;
4121
4122 return arm_ext_type (gdbarch);
4123 }
4124 else if (regnum == ARM_SP_REGNUM)
4125 return builtin_type (gdbarch)->builtin_data_ptr;
4126 else if (regnum == ARM_PC_REGNUM)
4127 return builtin_type (gdbarch)->builtin_func_ptr;
4128 else if (regnum >= ARRAY_SIZE (arm_register_names))
4129 /* These registers are only supported on targets which supply
4130 an XML description. */
4131 return builtin_type (gdbarch)->builtin_int0;
4132 else
4133 return builtin_type (gdbarch)->builtin_uint32;
4134 }
4135
4136 /* Map a DWARF register REGNUM onto the appropriate GDB register
4137 number. */
4138
4139 static int
4140 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4141 {
4142 /* Core integer regs. */
4143 if (reg >= 0 && reg <= 15)
4144 return reg;
4145
4146 /* Legacy FPA encoding. These were once used in a way which
4147 overlapped with VFP register numbering, so their use is
4148 discouraged, but GDB doesn't support the ARM toolchain
4149 which used them for VFP. */
4150 if (reg >= 16 && reg <= 23)
4151 return ARM_F0_REGNUM + reg - 16;
4152
4153 /* New assignments for the FPA registers. */
4154 if (reg >= 96 && reg <= 103)
4155 return ARM_F0_REGNUM + reg - 96;
4156
4157 /* WMMX register assignments. */
4158 if (reg >= 104 && reg <= 111)
4159 return ARM_WCGR0_REGNUM + reg - 104;
4160
4161 if (reg >= 112 && reg <= 127)
4162 return ARM_WR0_REGNUM + reg - 112;
4163
4164 if (reg >= 192 && reg <= 199)
4165 return ARM_WC0_REGNUM + reg - 192;
4166
4167 /* VFP v2 registers. A double precision value is actually
4168 in d1 rather than s2, but the ABI only defines numbering
4169 for the single precision registers. This will "just work"
4170 in GDB for little endian targets (we'll read eight bytes,
4171 starting in s0 and then progressing to s1), but will be
4172 reversed on big endian targets with VFP. This won't
4173 be a problem for the new Neon quad registers; you're supposed
4174 to use DW_OP_piece for those. */
4175 if (reg >= 64 && reg <= 95)
4176 {
4177 char name_buf[4];
4178
4179 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4180 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4181 strlen (name_buf));
4182 }
4183
4184 /* VFP v3 / Neon registers. This range is also used for VFP v2
4185 registers, except that it now describes d0 instead of s0. */
4186 if (reg >= 256 && reg <= 287)
4187 {
4188 char name_buf[4];
4189
4190 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4191 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4192 strlen (name_buf));
4193 }
4194
4195 return -1;
4196 }
4197
4198 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4199 static int
4200 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4201 {
4202 int reg = regnum;
4203 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4204
4205 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4206 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4207
4208 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4209 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4210
4211 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4212 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4213
4214 if (reg < NUM_GREGS)
4215 return SIM_ARM_R0_REGNUM + reg;
4216 reg -= NUM_GREGS;
4217
4218 if (reg < NUM_FREGS)
4219 return SIM_ARM_FP0_REGNUM + reg;
4220 reg -= NUM_FREGS;
4221
4222 if (reg < NUM_SREGS)
4223 return SIM_ARM_FPS_REGNUM + reg;
4224 reg -= NUM_SREGS;
4225
4226 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4227 }
4228
4229 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4230 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4231 It is thought that this is is the floating-point register format on
4232 little-endian systems. */
4233
4234 static void
4235 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4236 void *dbl, int endianess)
4237 {
4238 DOUBLEST d;
4239
4240 if (endianess == BFD_ENDIAN_BIG)
4241 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4242 else
4243 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4244 ptr, &d);
4245 floatformat_from_doublest (fmt, &d, dbl);
4246 }
4247
4248 static void
4249 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4250 int endianess)
4251 {
4252 DOUBLEST d;
4253
4254 floatformat_to_doublest (fmt, ptr, &d);
4255 if (endianess == BFD_ENDIAN_BIG)
4256 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4257 else
4258 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4259 &d, dbl);
4260 }
4261
4262 static int
4263 condition_true (unsigned long cond, unsigned long status_reg)
4264 {
4265 if (cond == INST_AL || cond == INST_NV)
4266 return 1;
4267
4268 switch (cond)
4269 {
4270 case INST_EQ:
4271 return ((status_reg & FLAG_Z) != 0);
4272 case INST_NE:
4273 return ((status_reg & FLAG_Z) == 0);
4274 case INST_CS:
4275 return ((status_reg & FLAG_C) != 0);
4276 case INST_CC:
4277 return ((status_reg & FLAG_C) == 0);
4278 case INST_MI:
4279 return ((status_reg & FLAG_N) != 0);
4280 case INST_PL:
4281 return ((status_reg & FLAG_N) == 0);
4282 case INST_VS:
4283 return ((status_reg & FLAG_V) != 0);
4284 case INST_VC:
4285 return ((status_reg & FLAG_V) == 0);
4286 case INST_HI:
4287 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4288 case INST_LS:
4289 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4290 case INST_GE:
4291 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4292 case INST_LT:
4293 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4294 case INST_GT:
4295 return (((status_reg & FLAG_Z) == 0)
4296 && (((status_reg & FLAG_N) == 0)
4297 == ((status_reg & FLAG_V) == 0)));
4298 case INST_LE:
4299 return (((status_reg & FLAG_Z) != 0)
4300 || (((status_reg & FLAG_N) == 0)
4301 != ((status_reg & FLAG_V) == 0)));
4302 }
4303 return 1;
4304 }
4305
/* Compute the value of the shifted-register operand of instruction
   INST, reading source registers from FRAME.  CARRY is the current
   carry flag (used for RRX), PC_VAL is the prefetched PC value, and
   STATUS_REG is the CPSR.  Returns the 32-bit operand value.  */

static unsigned long
shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
		 unsigned long pc_val, unsigned long status_reg)
{
  unsigned long res, shift;
  int rm = bits (inst, 0, 3);
  unsigned long shifttype = bits (inst, 5, 6);

  if (bit (inst, 4))
    {
      /* Register-specified shift: the amount is the low byte of Rs;
	 reading the PC as Rs yields the instruction address + 8.  */
      int rs = bits (inst, 8, 11);
      shift = (rs == 15 ? pc_val + 8
	       : get_frame_register_unsigned (frame, rs)) & 0xFF;
    }
  else
    /* Immediate shift amount from bits 7-11.  */
    shift = bits (inst, 7, 11);

  /* Reading the PC as Rm yields the instruction address plus 8, or
     plus 12 when a register-specified shift is used.  */
  res = (rm == ARM_PC_REGNUM
	 ? (pc_val + (bit (inst, 4) ? 12 : 8))
	 : get_frame_register_unsigned (frame, rm));

  switch (shifttype)
    {
    case 0:			/* LSL */
      res = shift >= 32 ? 0 : res << shift;
      break;

    case 1:			/* LSR */
      res = shift >= 32 ? 0 : res >> shift;
      break;

    case 2:			/* ASR */
      /* Shifts of 32 or more fill the result with the sign bit;
	 clamping to 31 achieves the same effect below.  */
      if (shift >= 32)
	shift = 31;
      res = ((res & 0x80000000L)
	     ? ~((~res) >> shift) : res >> shift);
      break;

    case 3:			/* ROR/RRX */
      shift &= 31;
      if (shift == 0)
	/* ROR #0 encodes RRX: rotate right one bit through carry.  */
	res = (res >> 1) | (carry ? 0x80000000L : 0);
      else
	res = (res >> shift) | (res << (32 - shift));
      break;
    }

  /* Mask to 32 bits in case unsigned long is wider.  */
  return res & 0xffffffff;
}
4355
/* Return number of 1-bits in VAL.  */

static int
bitcount (unsigned long val)
{
  int count = 0;

  /* Kernighan's trick: each iteration clears the rightmost set bit.  */
  while (val != 0)
    {
      val &= val - 1;
      count++;
    }

  return count;
}
4366
/* Return the size in bytes of the complete Thumb instruction whose
   first halfword is INST1.  */

static int
thumb_insn_size (unsigned short inst1)
{
  /* A 32-bit Thumb-2 instruction starts with a halfword whose top
     five bits are 0b11101, 0b11110 or 0b11111; any other prefix is a
     16-bit instruction.  */
  return ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0) ? 4 : 2;
}
4378
/* Advance the IT-state value ITSTATE past one instruction, returning
   the new state (zero once the IT block is finished).  */

static int
thumb_advance_itstate (unsigned int itstate)
{
  /* IT[7:5] hold the first three bits of the base condition and are
     preserved; the remaining condition/count bits shift left one.  */
  unsigned int base_cond = itstate & 0xe0;
  unsigned int mask = (itstate << 1) & 0x1f;

  /* When no more instructions remain, the whole state reads as zero.  */
  if ((mask & 0x0f) == 0)
    return 0;

  return base_cond | mask;
}
4392
/* Find the next PC after the current instruction executes.  In some
   cases we can not statically determine the answer (see the IT state
   handling in this function); in that case, a breakpoint may be
   inserted in addition to the returned PC, which will be used to set
   another breakpoint by our caller.

   PC is the address of the current (Thumb) instruction in FRAME.  The
   returned address has the Thumb bit set where appropriate; use
   gdbarch_addr_bits_remove to obtain the plain memory address.  */

static CORE_ADDR
thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long pc_val = ((unsigned long) pc) + 4;	/* PC after prefetch */
  unsigned short inst1;
  CORE_ADDR nextpc = pc + 2;		/* Default is next instruction.  */
  unsigned long offset;
  ULONGEST status, itstate;

  /* We are in Thumb state: tag both the default next address and the
     prefetched PC value with the Thumb bit.  */
  nextpc = MAKE_THUMB_ADDR (nextpc);
  pc_val = MAKE_THUMB_ADDR (pc_val);

  inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);

  /* Thumb-2 conditional execution support.  There are eight bits in
     the CPSR which describe conditional execution state.  Once
     reconstructed (they're in a funny order), the low five bits
     describe the low bit of the condition for each instruction and
     how many instructions remain.  The high three bits describe the
     base condition.  One of the low four bits will be set if an IT
     block is active.  These bits read as zero on earlier
     processors.  */
  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);

  /* If-Then handling.  On GNU/Linux, where this routine is used, we
     use an undefined instruction as a breakpoint.  Unlike BKPT, IT
     can disable execution of the undefined instruction.  So we might
     miss the breakpoint if we set it on a skipped conditional
     instruction.  Because conditional instructions can change the
     flags, affecting the execution of further instructions, we may
     need to set two breakpoints.  */

  if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
    {
      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
	{
	  /* An IT instruction.  Because this instruction does not
	     modify the flags, we can accurately predict the next
	     executed instruction.  */
	  itstate = inst1 & 0x00ff;
	  pc += thumb_insn_size (inst1);

	  /* Skip any instructions in the new IT block whose
	     conditions fail.  */
	  while (itstate != 0 && ! condition_true (itstate >> 4, status))
	    {
	      inst1 = read_memory_unsigned_integer (pc, 2,
						    byte_order_for_code);
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);
	    }

	  return MAKE_THUMB_ADDR (pc);
	}
      else if (itstate != 0)
	{
	  /* We are in a conditional block.  Check the condition.  */
	  if (! condition_true (itstate >> 4, status))
	    {
	      /* Advance to the next executed instruction.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      while (itstate != 0 && ! condition_true (itstate >> 4, status))
		{
		  inst1 = read_memory_unsigned_integer (pc, 2,
							byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}

	      return MAKE_THUMB_ADDR (pc);
	    }
	  else if ((itstate & 0x0f) == 0x08)
	    {
	      /* This is the last instruction of the conditional
		 block, and it is executed.  We can handle it normally
		 because the following instruction is not conditional,
		 and we must handle it normally because it is
		 permitted to branch.  Fall through.  */
	    }
	  else
	    {
	      int cond_negated;

	      /* There are conditional instructions after this one.
		 If this instruction modifies the flags, then we can
		 not predict what the next executed instruction will
		 be.  Fortunately, this instruction is architecturally
		 forbidden to branch; we know it will fall through.
		 Start by skipping past it.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      /* Set a breakpoint on the following instruction.  */
	      gdb_assert ((itstate & 0x0f) != 0);
	      arm_insert_single_step_breakpoint (gdbarch, aspace,
						 MAKE_THUMB_ADDR (pc));
	      cond_negated = (itstate >> 4) & 1;

	      /* Skip all following instructions with the same
		 condition.  If there is a later instruction in the IT
		 block with the opposite condition, set the other
		 breakpoint there.  If not, then set a breakpoint on
		 the instruction after the IT block.  */
	      do
		{
		  inst1 = read_memory_unsigned_integer (pc, 2,
							byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}
	      while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);

	      return MAKE_THUMB_ADDR (pc);
	    }
	}
    }
  else if (itstate & 0x0f)
    {
      /* We are in a conditional block.  Check the condition.  */
      int cond = itstate >> 4;

      if (! condition_true (cond, status))
	/* Advance to the next instruction.  All the 32-bit
	   instructions share a common prefix.  */
	return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));

      /* Otherwise, handle the instruction normally.  */
    }

  if ((inst1 & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
    {
      CORE_ADDR sp;

      /* Fetch the saved PC from the stack.  It's stored above
         all of the other registers.  */
      offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
      sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
      nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
    }
  else if ((inst1 & 0xf000) == 0xd000)	/* conditional branch */
    {
      unsigned long cond = bits (inst1, 8, 11);
      if (cond == 0x0f)  /* 0x0f = SWI */
	{
	  struct gdbarch_tdep *tdep;
	  tdep = gdbarch_tdep (gdbarch);

	  /* If the OS ABI provides a hook for computing the PC after
	     a syscall, use it; otherwise keep the default next PC.  */
	  if (tdep->syscall_next_pc != NULL)
	    nextpc = tdep->syscall_next_pc (frame);

	}
      else if (cond != 0x0f && condition_true (cond, status))
	nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
    }
  else if ((inst1 & 0xf800) == 0xe000)	/* unconditional branch */
    {
      nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
    }
  else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
    {
      unsigned short inst2;
      inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

      /* Default to the next instruction.  */
      nextpc = pc + 4;
      nextpc = MAKE_THUMB_ADDR (nextpc);

      if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
	{
	  /* Branches and miscellaneous control instructions.  */

	  if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	    {
	      /* B, BL, BLX.  */
	      int j1, j2, imm1, imm2;

	      /* Reassemble the sign-extended branch offset from the
		 split immediate and the inverted J1/J2 bits.  */
	      imm1 = sbits (inst1, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = pc_val + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;
	    }
	  else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	    {
	      /* SUBS PC, LR, #imm8.  */
	      nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
	      nextpc -= inst2 & 0x00ff;
	    }
	  else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	    {
	      /* Conditional branch.  */
	      if (condition_true (bits (inst1, 6, 9), status))
		{
		  int sign, j1, j2, imm1, imm2;

		  sign = sbits (inst1, 10, 10);
		  imm1 = bits (inst1, 0, 5);
		  imm2 = bits (inst2, 0, 10);
		  j1 = bit (inst2, 13);
		  j2 = bit (inst2, 11);

		  offset = (sign << 20) + (j2 << 19) + (j1 << 18);
		  offset += (imm1 << 12) + (imm2 << 1);

		  nextpc = pc_val + offset;
		}
	    }
	}
      else if ((inst1 & 0xfe50) == 0xe810)
	{
	  /* Load multiple or RFE.  */
	  int rn, offset, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  if (bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* LDMIA or POP */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      /* PC is loaded from the highest address in the list.  */
	      offset = bitcount (inst2) * 4 - 4;
	    }
	  else if (!bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* LDMDB */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = -4;
	    }
	  else if (bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* RFEIA */
	      offset = 0;
	    }
	  else if (!bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* RFEDB */
	      offset = -8;
	    }
	  else
	    load_pc = 0;

	  if (load_pc)
	    {
	      CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
	      nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
	    }
	}
      else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
	{
	  /* MOV PC or MOVS PC.  */
	  nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	}
      else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
	{
	  /* LDR PC.  Work out the effective address from the
	     addressing mode bits, then read the new PC from it.  */
	  CORE_ADDR base;
	  int rn, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  base = get_frame_register_unsigned (frame, rn);
	  if (rn == ARM_PC_REGNUM)
	    {
	      /* Literal load: the base is the word-aligned PC.  */
	      base = (base + 4) & ~(CORE_ADDR) 0x3;
	      if (bit (inst1, 7))
		base += bits (inst2, 0, 11);
	      else
		base -= bits (inst2, 0, 11);
	    }
	  else if (bit (inst1, 7))
	    base += bits (inst2, 0, 11);
	  else if (bit (inst2, 11))
	    {
	      if (bit (inst2, 10))
		{
		  if (bit (inst2, 9))
		    base += bits (inst2, 0, 7);
		  else
		    base -= bits (inst2, 0, 7);
		}
	    }
	  else if ((inst2 & 0x0fc0) == 0x0000)
	    {
	      /* Register offset with optional left shift.  */
	      int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
	      base += get_frame_register_unsigned (frame, rm) << shift;
	    }
	  else
	    /* Reserved.  */
	    load_pc = 0;

	  if (load_pc)
	    nextpc = get_frame_memory_unsigned (frame, base, 4);
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
	{
	  /* TBB.  Branch by twice the byte fetched from the table.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = get_frame_register_unsigned (frame, tbl_reg);

	  offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
	  nextpc = pc_val + length;
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
	{
	  /* TBH.  Like TBB but with halfword table entries.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = get_frame_register_unsigned (frame, tbl_reg);

	  offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
	  nextpc = pc_val + length;
	}
    }
  else if ((inst1 & 0xff00) == 0x4700)	/* bx REG, blx REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = UNMAKE_THUMB_ADDR (pc_val);
      else
	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
    }
  else if ((inst1 & 0xff87) == 0x4687)	/* mov pc, REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = pc_val;
      else
	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));

      nextpc = MAKE_THUMB_ADDR (nextpc);
    }
  else if ((inst1 & 0xf500) == 0xb100)
    {
      /* CBNZ or CBZ.  */
      int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
      ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));

      /* Bit 11 distinguishes CBNZ (branch if non-zero) from CBZ.  */
      if (bit (inst1, 11) && reg != 0)
	nextpc = pc_val + imm;
      else if (!bit (inst1, 11) && reg == 0)
	nextpc = pc_val + imm;
    }
  return nextpc;
}
4764
/* Get the raw next address.  PC is the current program counter, in
   FRAME, which is assumed to be executing in ARM mode.

   The value returned has the execution state of the next instruction
   encoded in it.  Use IS_THUMB_ADDR () to see whether the instruction is
   in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
   address.  */

static CORE_ADDR
arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long pc_val;
  unsigned long this_instr;
  unsigned long status;
  CORE_ADDR nextpc;

  pc_val = (unsigned long) pc;
  this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);

  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  nextpc = (CORE_ADDR) (pc_val + 4);	/* Default case */

  /* The 0xf condition field selects the unconditional instruction
     space rather than "never execute".  */
  if (bits (this_instr, 28, 31) == INST_NV)
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	{
	  /* Branch with Link and change to Thumb.  */
	  nextpc = BranchDest (pc, this_instr);
	  /* The H bit (bit 24) supplies bit 1 of the target address.  */
	  nextpc |= bit (this_instr, 24) << 1;
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	  break;
	}
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	break;
      }
  else if (condition_true (bits (this_instr, 28, 31), status))
    {
      switch (bits (this_instr, 24, 27))
	{
	case 0x0:
	case 0x1:			/* data processing */
	case 0x2:
	case 0x3:
	  {
	    unsigned long operand1, operand2, result = 0;
	    unsigned long rn;
	    int c;

	    /* Only instructions whose destination is the PC can
	       change the flow of control.  */
	    if (bits (this_instr, 12, 15) != 15)
	      break;

	    if (bits (this_instr, 22, 25) == 0
		&& bits (this_instr, 4, 7) == 9)	/* multiply */
	      error (_("Invalid update to pc in instruction"));

	    /* BX <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      {
		rn = bits (this_instr, 0, 3);
		nextpc = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : get_frame_register_unsigned (frame, rn));

		return nextpc;
	      }

	    /* Multiply into PC.  */
	    c = (status & FLAG_C) ? 1 : 0;
	    rn = bits (this_instr, 16, 19);
	    /* Reading the PC as an operand yields the instruction
	       address plus 8.  */
	    operand1 = ((rn == ARM_PC_REGNUM)
			? (pc_val + 8)
			: get_frame_register_unsigned (frame, rn));

	    if (bit (this_instr, 25))
	      {
		/* Immediate operand: 8-bit value rotated right by
		   twice the 4-bit rotate field.  */
		unsigned long immval = bits (this_instr, 0, 7);
		unsigned long rotate = 2 * bits (this_instr, 8, 11);
		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
		  & 0xffffffff;
	      }
	    else		/* operand 2 is a shifted register.  */
	      operand2 = shifted_reg_val (frame, this_instr, c,
					  pc_val, status);

	    /* Emulate the data-processing operation to find the value
	       written to the PC.  */
	    switch (bits (this_instr, 21, 24))
	      {
	      case 0x0:	/*and */
		result = operand1 & operand2;
		break;

	      case 0x1:	/*eor */
		result = operand1 ^ operand2;
		break;

	      case 0x2:	/*sub */
		result = operand1 - operand2;
		break;

	      case 0x3:	/*rsb */
		result = operand2 - operand1;
		break;

	      case 0x4:	/*add */
		result = operand1 + operand2;
		break;

	      case 0x5:	/*adc */
		result = operand1 + operand2 + c;
		break;

	      case 0x6:	/*sbc */
		result = operand1 - operand2 + c;
		break;

	      case 0x7:	/*rsc */
		result = operand2 - operand1 + c;
		break;

	      case 0x8:
	      case 0x9:
	      case 0xa:
	      case 0xb:	/* tst, teq, cmp, cmn */
		/* Comparison instructions don't write their result;
		   keep the default next PC.  */
		result = (unsigned long) nextpc;
		break;

	      case 0xc:	/*orr */
		result = operand1 | operand2;
		break;

	      case 0xd:	/*mov */
		/* Always step into a function.  */
		result = operand2;
		break;

	      case 0xe:	/*bic */
		result = operand1 & ~operand2;
		break;

	      case 0xf:	/*mvn */
		result = ~operand2;
		break;
	      }

	    /* In 26-bit APCS the bottom two bits of the result are
	       ignored, and we always end up in ARM state.  */
	    if (!arm_apcs_32)
	      nextpc = arm_addr_bits_remove (gdbarch, result);
	    else
	      nextpc = result;

	    break;
	  }

	case 0x4:
	case 0x5:		/* data transfer */
	case 0x6:
	case 0x7:
	  if (bits (this_instr, 25, 27) == 0x3 && bit (this_instr, 4) == 1)
	    {
	      /* Media instructions and architecturally undefined
		 instructions.  */
	      break;
	    }

	  if (bit (this_instr, 20))
	    {
	      /* load */
	      if (bits (this_instr, 12, 15) == 15)
		{
		  /* rd == pc */
		  unsigned long rn;
		  unsigned long base;

		  if (bit (this_instr, 22))
		    error (_("Invalid update to pc in instruction"));

		  /* byte write to PC */
		  rn = bits (this_instr, 16, 19);
		  base = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : get_frame_register_unsigned (frame, rn));

		  if (bit (this_instr, 24))
		    {
		      /* pre-indexed */
		      int c = (status & FLAG_C) ? 1 : 0;
		      unsigned long offset =
			(bit (this_instr, 25)
			 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
			 : bits (this_instr, 0, 11));

		      if (bit (this_instr, 23))
			base += offset;
		      else
			base -= offset;
		    }
		  nextpc =
		    (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
							      4, byte_order);
		}
	    }
	  break;

	case 0x8:
	case 0x9:		/* block transfer */
	  if (bit (this_instr, 20))
	    {
	      /* LDM */
	      if (bit (this_instr, 15))
		{
		  /* loading pc */
		  int offset = 0;
		  unsigned long rn_val
		    = get_frame_register_unsigned (frame,
						   bits (this_instr, 16, 19));

		  if (bit (this_instr, 23))
		    {
		      /* up: the PC is loaded from the highest address
			 covered by the register list.  */
		      unsigned long reglist = bits (this_instr, 0, 14);
		      offset = bitcount (reglist) * 4;
		      if (bit (this_instr, 24))		/* pre */
			offset += 4;
		    }
		  else if (bit (this_instr, 24))
		    offset = -4;

		  nextpc =
		    (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
							      (rn_val + offset),
							      4, byte_order);
		}
	    }
	  break;

	case 0xb:		/* branch & link */
	case 0xa:		/* branch */
	  {
	    nextpc = BranchDest (pc, this_instr);
	    break;
	  }

	case 0xc:
	case 0xd:
	case 0xe:		/* coproc ops */
	  break;
	case 0xf:		/* SWI */
	  {
	    struct gdbarch_tdep *tdep;
	    tdep = gdbarch_tdep (gdbarch);

	    /* If the OS ABI provides a hook for computing the PC
	       after a syscall, use it.  */
	    if (tdep->syscall_next_pc != NULL)
	      nextpc = tdep->syscall_next_pc (frame);

	  }
	  break;

	default:
	  fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
	  return (pc);
	}
    }

  return nextpc;
}
5041
5042 /* Determine next PC after current instruction executes. Will call either
5043 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5044 loop is detected. */
5045
5046 CORE_ADDR
5047 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5048 {
5049 CORE_ADDR nextpc;
5050
5051 if (arm_frame_is_thumb (frame))
5052 nextpc = thumb_get_next_pc_raw (frame, pc);
5053 else
5054 nextpc = arm_get_next_pc_raw (frame, pc);
5055
5056 return nextpc;
5057 }
5058
5059 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5060 of the appropriate mode (as encoded in the PC value), even if this
5061 differs from what would be expected according to the symbol tables. */
5062
5063 void
5064 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5065 struct address_space *aspace,
5066 CORE_ADDR pc)
5067 {
5068 struct cleanup *old_chain
5069 = make_cleanup_restore_integer (&arm_override_mode);
5070
5071 arm_override_mode = IS_THUMB_ADDR (pc);
5072 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5073
5074 insert_single_step_breakpoint (gdbarch, aspace, pc);
5075
5076 do_cleanups (old_chain);
5077 }
5078
/* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
   instruction and ending with a STREX{,B,H,D} instruction.  If such a sequence
   is found, attempt to step through it.  A breakpoint is placed at the end of
   the sequence.

   Returns non-zero (and inserts the breakpoints) if a sequence was
   recognized at FRAME's PC, zero if the caller should fall back to
   ordinary single-stepping.  */

static int
thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR pc = get_frame_pc (frame);
  CORE_ADDR breaks[2] = {-1, -1};
  CORE_ADDR loc = pc;
  unsigned short insn1, insn2;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */
  ULONGEST status, itstate;

  /* We currently do not support atomic sequences within an IT block.  */
  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
  if (itstate & 0x0f)
    return 0;

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.  */
  insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
  loc += 2;
  if (thumb_insn_size (insn1) != 4)
    return 0;

  insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
  loc += 2;
  if (!((insn1 & 0xfff0) == 0xe850
        || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
    return 0;

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
      loc += 2;

      if (thumb_insn_size (insn1) != 4)
	{
	  /* Assume that there is at most one conditional branch in the
	     atomic sequence.  If a conditional branch is found, put a
	     breakpoint in its destination address.  */
	  if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
	    {
	      if (last_breakpoint > 0)
		return 0; /* More than one conditional branch found,
			     fallback to the standard code.  */

	      breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
	      last_breakpoint++;
	    }

	  /* We do not support atomic sequences that use any *other*
	     instructions but conditional branches to change the PC.
	     Fall back to standard code to avoid losing control of
	     execution.  */
	  else if (thumb_instruction_changes_pc (insn1))
	    return 0;
	}
      else
	{
	  /* 32-bit instruction: fetch the second halfword.  */
	  insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
	  loc += 2;

	  /* Assume that there is at most one conditional branch in the
	     atomic sequence.  If a conditional branch is found, put a
	     breakpoint in its destination address.  */
	  if ((insn1 & 0xf800) == 0xf000
	      && (insn2 & 0xd000) == 0x8000
	      && (insn1 & 0x0380) != 0x0380)
	    {
	      int sign, j1, j2, imm1, imm2;
	      unsigned int offset;

	      /* Reassemble the branch offset from its scattered
		 fields, as for a Thumb-2 conditional branch.  */
	      sign = sbits (insn1, 10, 10);
	      imm1 = bits (insn1, 0, 5);
	      imm2 = bits (insn2, 0, 10);
	      j1 = bit (insn2, 13);
	      j2 = bit (insn2, 11);

	      offset = (sign << 20) + (j2 << 19) + (j1 << 18);
	      offset += (imm1 << 12) + (imm2 << 1);

	      if (last_breakpoint > 0)
		return 0; /* More than one conditional branch found,
			     fallback to the standard code.  */

	      breaks[1] = loc + offset;
	      last_breakpoint++;
	    }

	  /* We do not support atomic sequences that use any *other*
	     instructions but conditional branches to change the PC.
	     Fall back to standard code to avoid losing control of
	     execution.  */
	  else if (thumb2_instruction_changes_pc (insn1, insn2))
	    return 0;

	  /* If we find a strex{,b,h,d}, we're done.  */
	  if ((insn1 & 0xfff0) == 0xe840
	      || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
	    break;
	}
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return 0;

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
	  || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  /* Effectively inserts the breakpoints.  */
  for (index = 0; index <= last_breakpoint; index++)
    arm_insert_single_step_breakpoint (gdbarch, aspace,
				       MAKE_THUMB_ADDR (breaks[index]));

  return 1;
}
5214
/* Find and handle an ARM-mode ldrex/strex atomic sequence starting at
   the PC of FRAME.  Single-stepping into the middle of such a sequence
   would cause the store-exclusive to fail forever, so instead we plant
   breakpoints after the sequence (and at the destination of any
   conditional branch within it).  Returns 1 if breakpoints were
   placed, 0 to fall back to the normal single-step code.  */
static int
arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR pc = get_frame_pc (frame);
  CORE_ADDR breaks[2] = {-1, -1};
  CORE_ADDR loc = pc;
  unsigned int insn;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
     Note that we do not currently support conditionally executed atomic
     instructions.  */
  insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
  loc += 4;
  /* Mask/value match the ldrex{,b,h,d} encodings.  */
  if ((insn & 0xff9000f0) != 0xe1900090)
    return 0;

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
      loc += 4;

      /* Assume that there is at most one conditional branch in the atomic
	 sequence.  If a conditional branch is found, put a breakpoint in
	 its destination address.
	 NOTE(review): opcode bits 24-27 == 0xa matches B<cond>, but also
	 BLX (immediate, H=0) whose condition field is 0xf — presumably
	 BLX does not occur inside atomic sequences; confirm.  */
      if (bits (insn, 24, 27) == 0xa)
	{
	  if (last_breakpoint > 0)
	    return 0; /* More than one conditional branch found, fallback
			 to the standard single-step code.  */

	  /* LOC has already been advanced past this instruction, so the
	     branch base (PC + 8) is LOC - 4 + 8, handled by BranchDest.  */
	  breaks[1] = BranchDest (loc - 4, insn);
	  last_breakpoint++;
	}

      /* We do not support atomic sequences that use any *other* instructions
	 but conditional branches to change the PC.  Fall back to standard
	 code to avoid losing control of execution.  */
      else if (arm_instruction_changes_pc (insn))
	return 0;

      /* If we find a strex{,b,h,d}, we're done.  */
      if ((insn & 0xff9000f0) == 0xe1800090)
	break;
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return 0;

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
	  || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  /* Effectively inserts the breakpoints.  */
  for (index = 0; index <= last_breakpoint; index++)
    arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);

  return 1;
}
5289
/* Handle a ldrex/strex atomic sequence at FRAME's PC for single-step,
   dispatching on the current execution state (ARM vs Thumb).  Returns
   non-zero when the sequence was handled by planting breakpoints.  */
int
arm_deal_with_atomic_sequence (struct frame_info *frame)
{
  return (arm_frame_is_thumb (frame)
	  ? thumb_deal_with_atomic_sequence_raw (frame)
	  : arm_deal_with_atomic_sequence_raw (frame));
}
5298
5299 /* single_step() is called just before we want to resume the inferior,
5300 if we want to single-step it but there is no hardware or kernel
5301 single-step support. We find the target of the coming instruction
5302 and breakpoint it. */
5303
5304 int
5305 arm_software_single_step (struct frame_info *frame)
5306 {
5307 struct gdbarch *gdbarch = get_frame_arch (frame);
5308 struct address_space *aspace = get_frame_address_space (frame);
5309 CORE_ADDR next_pc;
5310
5311 if (arm_deal_with_atomic_sequence (frame))
5312 return 1;
5313
5314 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5315 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5316
5317 return 1;
5318 }
5319
5320 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5321 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5322 NULL if an error occurs. BUF is freed. */
5323
5324 static gdb_byte *
5325 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5326 int old_len, int new_len)
5327 {
5328 gdb_byte *new_buf;
5329 int bytes_to_read = new_len - old_len;
5330
5331 new_buf = (gdb_byte *) xmalloc (new_len);
5332 memcpy (new_buf + bytes_to_read, buf, old_len);
5333 xfree (buf);
5334 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5335 {
5336 xfree (new_buf);
5337 return NULL;
5338 }
5339 return new_buf;
5340 }
5341
5342 /* An IT block is at most the 2-byte IT instruction followed by
5343 four 4-byte instructions. The furthest back we must search to
5344 find an IT block that affects the current instruction is thus
5345 2 + 3 * 4 == 14 bytes. */
5346 #define MAX_IT_BLOCK_PREFIX 14
5347
5348 /* Use a quick scan if there are more than this many bytes of
5349 code. */
5350 #define IT_SCAN_THRESHOLD 32
5351
5352 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5353 A breakpoint in an IT block may not be hit, depending on the
5354 condition flags. */
5355 static CORE_ADDR
5356 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5357 {
5358 gdb_byte *buf;
5359 char map_type;
5360 CORE_ADDR boundary, func_start;
5361 int buf_len;
5362 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5363 int i, any, last_it, last_it_count;
5364
5365 /* If we are using BKPT breakpoints, none of this is necessary. */
5366 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5367 return bpaddr;
5368
5369 /* ARM mode does not have this problem. */
5370 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5371 return bpaddr;
5372
5373 /* We are setting a breakpoint in Thumb code that could potentially
5374 contain an IT block. The first step is to find how much Thumb
5375 code there is; we do not need to read outside of known Thumb
5376 sequences. */
5377 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5378 if (map_type == 0)
5379 /* Thumb-2 code must have mapping symbols to have a chance. */
5380 return bpaddr;
5381
5382 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5383
5384 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5385 && func_start > boundary)
5386 boundary = func_start;
5387
5388 /* Search for a candidate IT instruction. We have to do some fancy
5389 footwork to distinguish a real IT instruction from the second
5390 half of a 32-bit instruction, but there is no need for that if
5391 there's no candidate. */
5392 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5393 if (buf_len == 0)
5394 /* No room for an IT instruction. */
5395 return bpaddr;
5396
5397 buf = (gdb_byte *) xmalloc (buf_len);
5398 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5399 return bpaddr;
5400 any = 0;
5401 for (i = 0; i < buf_len; i += 2)
5402 {
5403 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5404 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5405 {
5406 any = 1;
5407 break;
5408 }
5409 }
5410 if (any == 0)
5411 {
5412 xfree (buf);
5413 return bpaddr;
5414 }
5415
5416 /* OK, the code bytes before this instruction contain at least one
5417 halfword which resembles an IT instruction. We know that it's
5418 Thumb code, but there are still two possibilities. Either the
5419 halfword really is an IT instruction, or it is the second half of
5420 a 32-bit Thumb instruction. The only way we can tell is to
5421 scan forwards from a known instruction boundary. */
5422 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5423 {
5424 int definite;
5425
5426 /* There's a lot of code before this instruction. Start with an
5427 optimistic search; it's easy to recognize halfwords that can
5428 not be the start of a 32-bit instruction, and use that to
5429 lock on to the instruction boundaries. */
5430 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5431 if (buf == NULL)
5432 return bpaddr;
5433 buf_len = IT_SCAN_THRESHOLD;
5434
5435 definite = 0;
5436 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5437 {
5438 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5439 if (thumb_insn_size (inst1) == 2)
5440 {
5441 definite = 1;
5442 break;
5443 }
5444 }
5445
5446 /* At this point, if DEFINITE, BUF[I] is the first place we
5447 are sure that we know the instruction boundaries, and it is far
5448 enough from BPADDR that we could not miss an IT instruction
5449 affecting BPADDR. If ! DEFINITE, give up - start from a
5450 known boundary. */
5451 if (! definite)
5452 {
5453 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5454 bpaddr - boundary);
5455 if (buf == NULL)
5456 return bpaddr;
5457 buf_len = bpaddr - boundary;
5458 i = 0;
5459 }
5460 }
5461 else
5462 {
5463 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5464 if (buf == NULL)
5465 return bpaddr;
5466 buf_len = bpaddr - boundary;
5467 i = 0;
5468 }
5469
5470 /* Scan forwards. Find the last IT instruction before BPADDR. */
5471 last_it = -1;
5472 last_it_count = 0;
5473 while (i < buf_len)
5474 {
5475 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5476 last_it_count--;
5477 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5478 {
5479 last_it = i;
5480 if (inst1 & 0x0001)
5481 last_it_count = 4;
5482 else if (inst1 & 0x0002)
5483 last_it_count = 3;
5484 else if (inst1 & 0x0004)
5485 last_it_count = 2;
5486 else
5487 last_it_count = 1;
5488 }
5489 i += thumb_insn_size (inst1);
5490 }
5491
5492 xfree (buf);
5493
5494 if (last_it == -1)
5495 /* There wasn't really an IT instruction after all. */
5496 return bpaddr;
5497
5498 if (last_it_count < 1)
5499 /* It was too far away. */
5500 return bpaddr;
5501
5502 /* This really is a trouble spot. Move the breakpoint to the IT
5503 instruction. */
5504 return bpaddr - buf_len + last_it;
5505 }
5506
5507 /* ARM displaced stepping support.
5508
5509 Generally ARM displaced stepping works as follows:
5510
5511 1. When an instruction is to be single-stepped, it is first decoded by
5512 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5513 Depending on the type of instruction, it is then copied to a scratch
5514 location, possibly in a modified form. The copy_* set of functions
5515 performs such modification, as necessary. A breakpoint is placed after
5516 the modified instruction in the scratch space to return control to GDB.
5517 Note in particular that instructions which modify the PC will no longer
5518 do so after modification.
5519
5520 2. The instruction is single-stepped, by setting the PC to the scratch
5521 location address, and resuming. Control returns to GDB when the
5522 breakpoint is hit.
5523
5524 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5525 function used for the current instruction. This function's job is to
5526 put the CPU/memory state back to what it would have been if the
5527 instruction had been executed unmodified in its original location. */
5528
5529 /* NOP instruction (mov r0, r0). */
5530 #define ARM_NOP 0xe1a00000
5531 #define THUMB_NOP 0x4600
5532
5533 /* Helper for register reads for displaced stepping. In particular, this
5534 returns the PC as it would be seen by the instruction at its original
5535 location. */
5536
5537 ULONGEST
5538 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5539 int regno)
5540 {
5541 ULONGEST ret;
5542 CORE_ADDR from = dsc->insn_addr;
5543
5544 if (regno == ARM_PC_REGNUM)
5545 {
5546 /* Compute pipeline offset:
5547 - When executing an ARM instruction, PC reads as the address of the
5548 current instruction plus 8.
5549 - When executing a Thumb instruction, PC reads as the address of the
5550 current instruction plus 4. */
5551
5552 if (!dsc->is_thumb)
5553 from += 8;
5554 else
5555 from += 4;
5556
5557 if (debug_displaced)
5558 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5559 (unsigned long) from);
5560 return (ULONGEST) from;
5561 }
5562 else
5563 {
5564 regcache_cooked_read_unsigned (regs, regno, &ret);
5565 if (debug_displaced)
5566 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5567 regno, (unsigned long) ret);
5568 return ret;
5569 }
5570 }
5571
5572 static int
5573 displaced_in_arm_mode (struct regcache *regs)
5574 {
5575 ULONGEST ps;
5576 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5577
5578 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5579
5580 return (ps & t_bit) == 0;
5581 }
5582
5583 /* Write to the PC as from a branch instruction. */
5584
5585 static void
5586 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5587 ULONGEST val)
5588 {
5589 if (!dsc->is_thumb)
5590 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5591 architecture versions < 6. */
5592 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5593 val & ~(ULONGEST) 0x3);
5594 else
5595 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5596 val & ~(ULONGEST) 0x1);
5597 }
5598
5599 /* Write to the PC as from a branch-exchange instruction. */
5600
5601 static void
5602 bx_write_pc (struct regcache *regs, ULONGEST val)
5603 {
5604 ULONGEST ps;
5605 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5606
5607 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5608
5609 if ((val & 1) == 1)
5610 {
5611 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5612 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5613 }
5614 else if ((val & 2) == 0)
5615 {
5616 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5617 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5618 }
5619 else
5620 {
5621 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5622 mode, align dest to 4 bytes). */
5623 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5624 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5625 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5626 }
5627 }
5628
5629 /* Write to the PC as if from a load instruction. */
5630
5631 static void
5632 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5633 ULONGEST val)
5634 {
5635 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5636 bx_write_pc (regs, val);
5637 else
5638 branch_write_pc (regs, dsc, val);
5639 }
5640
5641 /* Write to the PC as if from an ALU instruction. */
5642
5643 static void
5644 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5645 ULONGEST val)
5646 {
5647 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5648 bx_write_pc (regs, val);
5649 else
5650 branch_write_pc (regs, dsc, val);
5651 }
5652
5653 /* Helper for writing to registers for displaced stepping. Writing to the PC
5654 has a varying effects depending on the instruction which does the write:
5655 this is controlled by the WRITE_PC argument. */
5656
5657 void
5658 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5659 int regno, ULONGEST val, enum pc_write_style write_pc)
5660 {
5661 if (regno == ARM_PC_REGNUM)
5662 {
5663 if (debug_displaced)
5664 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5665 (unsigned long) val);
5666 switch (write_pc)
5667 {
5668 case BRANCH_WRITE_PC:
5669 branch_write_pc (regs, dsc, val);
5670 break;
5671
5672 case BX_WRITE_PC:
5673 bx_write_pc (regs, val);
5674 break;
5675
5676 case LOAD_WRITE_PC:
5677 load_write_pc (regs, dsc, val);
5678 break;
5679
5680 case ALU_WRITE_PC:
5681 alu_write_pc (regs, dsc, val);
5682 break;
5683
5684 case CANNOT_WRITE_PC:
5685 warning (_("Instruction wrote to PC in an unexpected way when "
5686 "single-stepping"));
5687 break;
5688
5689 default:
5690 internal_error (__FILE__, __LINE__,
5691 _("Invalid argument to displaced_write_reg"));
5692 }
5693
5694 dsc->wrote_to_pc = 1;
5695 }
5696 else
5697 {
5698 if (debug_displaced)
5699 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5700 regno, (unsigned long) val);
5701 regcache_cooked_write_unsigned (regs, regno, val);
5702 }
5703 }
5704
/* Concisely determine whether INSN references the PC.  Each register
   field of interest in INSN must have the corresponding four bits of
   BITMASK set to 0b1111.  Returns 1 if any such field in INSN holds
   0b1111 (r15, the PC), else 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t remaining = bitmask;

  while (remaining != 0)
    {
      /* Isolate the lowest set bit; a 4-bit register field starts
	 there.  */
      uint32_t low = remaining & -remaining;
      uint32_t field = low * 0xf;

      /* All four field bits set in INSN means the field names r15.  */
      if ((insn & field) == field)
	return 1;

      remaining &= ~field;
    }

  return 0;
}
5736
5737 /* The simplest copy function. Many instructions have the same effect no
5738 matter what address they are executed at: in those cases, use this. */
5739
5740 static int
5741 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5742 const char *iname, struct displaced_step_closure *dsc)
5743 {
5744 if (debug_displaced)
5745 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5746 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5747 iname);
5748
5749 dsc->modinsn[0] = insn;
5750
5751 return 0;
5752 }
5753
5754 static int
5755 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5756 uint16_t insn2, const char *iname,
5757 struct displaced_step_closure *dsc)
5758 {
5759 if (debug_displaced)
5760 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5761 "opcode/class '%s' unmodified\n", insn1, insn2,
5762 iname);
5763
5764 dsc->modinsn[0] = insn1;
5765 dsc->modinsn[1] = insn2;
5766 dsc->numinsns = 2;
5767
5768 return 0;
5769 }
5770
5771 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5772 modification. */
5773 static int
5774 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5775 const char *iname,
5776 struct displaced_step_closure *dsc)
5777 {
5778 if (debug_displaced)
5779 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5780 "opcode/class '%s' unmodified\n", insn,
5781 iname);
5782
5783 dsc->modinsn[0] = insn;
5784
5785 return 0;
5786 }
5787
5788 /* Preload instructions with immediate offset. */
5789
5790 static void
5791 cleanup_preload (struct gdbarch *gdbarch,
5792 struct regcache *regs, struct displaced_step_closure *dsc)
5793 {
5794 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5795 if (!dsc->u.preload.immed)
5796 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5797 }
5798
5799 static void
5800 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5801 struct displaced_step_closure *dsc, unsigned int rn)
5802 {
5803 ULONGEST rn_val;
5804 /* Preload instructions:
5805
5806 {pli/pld} [rn, #+/-imm]
5807 ->
5808 {pli/pld} [r0, #+/-imm]. */
5809
5810 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5811 rn_val = displaced_read_reg (regs, dsc, rn);
5812 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5813 dsc->u.preload.immed = 1;
5814
5815 dsc->cleanup = &cleanup_preload;
5816 }
5817
5818 static int
5819 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5820 struct displaced_step_closure *dsc)
5821 {
5822 unsigned int rn = bits (insn, 16, 19);
5823
5824 if (!insn_references_pc (insn, 0x000f0000ul))
5825 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5826
5827 if (debug_displaced)
5828 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5829 (unsigned long) insn);
5830
5831 dsc->modinsn[0] = insn & 0xfff0ffff;
5832
5833 install_preload (gdbarch, regs, dsc, rn);
5834
5835 return 0;
5836 }
5837
5838 static int
5839 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5840 struct regcache *regs, struct displaced_step_closure *dsc)
5841 {
5842 unsigned int rn = bits (insn1, 0, 3);
5843 unsigned int u_bit = bit (insn1, 7);
5844 int imm12 = bits (insn2, 0, 11);
5845 ULONGEST pc_val;
5846
5847 if (rn != ARM_PC_REGNUM)
5848 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5849
5850 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5851 PLD (literal) Encoding T1. */
5852 if (debug_displaced)
5853 fprintf_unfiltered (gdb_stdlog,
5854 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5855 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5856 imm12);
5857
5858 if (!u_bit)
5859 imm12 = -1 * imm12;
5860
5861 /* Rewrite instruction {pli/pld} PC imm12 into:
5862 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5863
5864 {pli/pld} [r0, r1]
5865
5866 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5867
5868 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5869 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5870
5871 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5872
5873 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5874 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5875 dsc->u.preload.immed = 0;
5876
5877 /* {pli/pld} [r0, r1] */
5878 dsc->modinsn[0] = insn1 & 0xfff0;
5879 dsc->modinsn[1] = 0xf001;
5880 dsc->numinsns = 2;
5881
5882 dsc->cleanup = &cleanup_preload;
5883 return 0;
5884 }
5885
5886 /* Preload instructions with register offset. */
5887
5888 static void
5889 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5890 struct displaced_step_closure *dsc, unsigned int rn,
5891 unsigned int rm)
5892 {
5893 ULONGEST rn_val, rm_val;
5894
5895 /* Preload register-offset instructions:
5896
5897 {pli/pld} [rn, rm {, shift}]
5898 ->
5899 {pli/pld} [r0, r1 {, shift}]. */
5900
5901 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5902 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5903 rn_val = displaced_read_reg (regs, dsc, rn);
5904 rm_val = displaced_read_reg (regs, dsc, rm);
5905 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5906 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5907 dsc->u.preload.immed = 0;
5908
5909 dsc->cleanup = &cleanup_preload;
5910 }
5911
5912 static int
5913 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5914 struct regcache *regs,
5915 struct displaced_step_closure *dsc)
5916 {
5917 unsigned int rn = bits (insn, 16, 19);
5918 unsigned int rm = bits (insn, 0, 3);
5919
5920
5921 if (!insn_references_pc (insn, 0x000f000ful))
5922 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5923
5924 if (debug_displaced)
5925 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5926 (unsigned long) insn);
5927
5928 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5929
5930 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5931 return 0;
5932 }
5933
5934 /* Copy/cleanup coprocessor load and store instructions. */
5935
5936 static void
5937 cleanup_copro_load_store (struct gdbarch *gdbarch,
5938 struct regcache *regs,
5939 struct displaced_step_closure *dsc)
5940 {
5941 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5942
5943 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5944
5945 if (dsc->u.ldst.writeback)
5946 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5947 }
5948
5949 static void
5950 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5951 struct displaced_step_closure *dsc,
5952 int writeback, unsigned int rn)
5953 {
5954 ULONGEST rn_val;
5955
5956 /* Coprocessor load/store instructions:
5957
5958 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5959 ->
5960 {stc/stc2} [r0, #+/-imm].
5961
5962 ldc/ldc2 are handled identically. */
5963
5964 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5965 rn_val = displaced_read_reg (regs, dsc, rn);
5966 /* PC should be 4-byte aligned. */
5967 rn_val = rn_val & 0xfffffffc;
5968 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5969
5970 dsc->u.ldst.writeback = writeback;
5971 dsc->u.ldst.rn = rn;
5972
5973 dsc->cleanup = &cleanup_copro_load_store;
5974 }
5975
5976 static int
5977 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5978 struct regcache *regs,
5979 struct displaced_step_closure *dsc)
5980 {
5981 unsigned int rn = bits (insn, 16, 19);
5982
5983 if (!insn_references_pc (insn, 0x000f0000ul))
5984 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5985
5986 if (debug_displaced)
5987 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5988 "load/store insn %.8lx\n", (unsigned long) insn);
5989
5990 dsc->modinsn[0] = insn & 0xfff0ffff;
5991
5992 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5993
5994 return 0;
5995 }
5996
5997 static int
5998 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5999 uint16_t insn2, struct regcache *regs,
6000 struct displaced_step_closure *dsc)
6001 {
6002 unsigned int rn = bits (insn1, 0, 3);
6003
6004 if (rn != ARM_PC_REGNUM)
6005 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6006 "copro load/store", dsc);
6007
6008 if (debug_displaced)
6009 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6010 "load/store insn %.4x%.4x\n", insn1, insn2);
6011
6012 dsc->modinsn[0] = insn1 & 0xfff0;
6013 dsc->modinsn[1] = insn2;
6014 dsc->numinsns = 2;
6015
6016 /* This function is called for copying instruction LDC/LDC2/VLDR, which
6017 doesn't support writeback, so pass 0. */
6018 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6019
6020 return 0;
6021 }
6022
6023 /* Clean up branch instructions (actually perform the branch, by setting
6024 PC). */
6025
6026 static void
6027 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
6028 struct displaced_step_closure *dsc)
6029 {
6030 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6031 int branch_taken = condition_true (dsc->u.branch.cond, status);
6032 enum pc_write_style write_pc = dsc->u.branch.exchange
6033 ? BX_WRITE_PC : BRANCH_WRITE_PC;
6034
6035 if (!branch_taken)
6036 return;
6037
6038 if (dsc->u.branch.link)
6039 {
6040 /* The value of LR should be the next insn of current one. In order
6041 not to confuse logic hanlding later insn `bx lr', if current insn mode
6042 is Thumb, the bit 0 of LR value should be set to 1. */
6043 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
6044
6045 if (dsc->is_thumb)
6046 next_insn_addr |= 0x1;
6047
6048 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
6049 CANNOT_WRITE_PC);
6050 }
6051
6052 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
6053 }
6054
6055 /* Copy B/BL/BLX instructions with immediate destinations. */
6056
6057 static void
6058 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6059 struct displaced_step_closure *dsc,
6060 unsigned int cond, int exchange, int link, long offset)
6061 {
6062 /* Implement "BL<cond> <label>" as:
6063
6064 Preparation: cond <- instruction condition
6065 Insn: mov r0, r0 (nop)
6066 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6067
6068 B<cond> similar, but don't set r14 in cleanup. */
6069
6070 dsc->u.branch.cond = cond;
6071 dsc->u.branch.link = link;
6072 dsc->u.branch.exchange = exchange;
6073
6074 dsc->u.branch.dest = dsc->insn_addr;
6075 if (link && exchange)
6076 /* For BLX, offset is computed from the Align (PC, 4). */
6077 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6078
6079 if (dsc->is_thumb)
6080 dsc->u.branch.dest += 4 + offset;
6081 else
6082 dsc->u.branch.dest += 8 + offset;
6083
6084 dsc->cleanup = &cleanup_branch;
6085 }
6086 static int
6087 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6088 struct regcache *regs, struct displaced_step_closure *dsc)
6089 {
6090 unsigned int cond = bits (insn, 28, 31);
6091 int exchange = (cond == 0xf);
6092 int link = exchange || bit (insn, 24);
6093 long offset;
6094
6095 if (debug_displaced)
6096 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6097 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6098 (unsigned long) insn);
6099 if (exchange)
6100 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6101 then arrange the switch into Thumb mode. */
6102 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6103 else
6104 offset = bits (insn, 0, 23) << 2;
6105
6106 if (bit (offset, 25))
6107 offset = offset | ~0x3ffffff;
6108
6109 dsc->modinsn[0] = ARM_NOP;
6110
6111 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6112 return 0;
6113 }
6114
/* Copy a 32-bit Thumb B/BL/BLX (immediate) instruction for displaced
   stepping.  The copied instruction is replaced by a Thumb NOP; the
   decoded condition and offset are handed to cleanup_branch (via
   install_b_bl_blx), which performs the actual control transfer.  */
static int
thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
		      uint16_t insn2, struct regcache *regs,
		      struct displaced_step_closure *dsc)
{
  int link = bit (insn2, 14);
  int exchange = link && !bit (insn2, 12);
  int cond = INST_AL;
  long offset = 0;
  /* Offset reassembly: I1 = NOT(J1 EOR S), I2 = NOT(J2 EOR S), where
     S is bit 10 of the first halfword.  */
  int j1 = bit (insn2, 13);
  int j2 = bit (insn2, 11);
  int s = sbits (insn1, 10, 10);
  int i1 = !(j1 ^ bit (insn1, 10));
  int i2 = !(j2 ^ bit (insn1, 10));

  if (!link && !exchange) /* B */
    {
      offset = (bits (insn2, 0, 10) << 1);
      if (bit (insn2, 12)) /* Encoding T4 */
	{
	  offset |= (bits (insn1, 0, 9) << 12)
	    | (i2 << 22)
	    | (i1 << 23)
	    | (s << 24);	/* S is -1 or 0: sign-extends the offset.  */
	  cond = INST_AL;
	}
      else /* Encoding T3 */
	{
	  offset |= (bits (insn1, 0, 5) << 12)
	    | (j1 << 18)
	    | (j2 << 19)
	    | (s << 20);
	  cond = bits (insn1, 6, 9);
	}
    }
  else
    {
      /* BL / BLX.  BLX targets are word-aligned, so its low offset
	 bits shift up by 2 instead of 1.  */
      offset = (bits (insn1, 0, 9) << 12);
      offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
      offset |= exchange ?
	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
			"%.4x %.4x with offset %.8lx\n",
			link ? (exchange) ? "blx" : "bl" : "b",
			insn1, insn2, offset);

  dsc->modinsn[0] = THUMB_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
6169
6170 /* Copy B Thumb instructions. */
6171 static int
6172 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
6173 struct displaced_step_closure *dsc)
6174 {
6175 unsigned int cond = 0;
6176 int offset = 0;
6177 unsigned short bit_12_15 = bits (insn, 12, 15);
6178 CORE_ADDR from = dsc->insn_addr;
6179
6180 if (bit_12_15 == 0xd)
6181 {
6182 /* offset = SignExtend (imm8:0, 32) */
6183 offset = sbits ((insn << 1), 0, 8);
6184 cond = bits (insn, 8, 11);
6185 }
6186 else if (bit_12_15 == 0xe) /* Encoding T2 */
6187 {
6188 offset = sbits ((insn << 1), 0, 11);
6189 cond = INST_AL;
6190 }
6191
6192 if (debug_displaced)
6193 fprintf_unfiltered (gdb_stdlog,
6194 "displaced: copying b immediate insn %.4x "
6195 "with offset %d\n", insn, offset);
6196
6197 dsc->u.branch.cond = cond;
6198 dsc->u.branch.link = 0;
6199 dsc->u.branch.exchange = 0;
6200 dsc->u.branch.dest = from + 4 + offset;
6201
6202 dsc->modinsn[0] = THUMB_NOP;
6203
6204 dsc->cleanup = &cleanup_branch;
6205
6206 return 0;
6207 }
6208
6209 /* Copy BX/BLX with register-specified destinations. */
6210
6211 static void
6212 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6213 struct displaced_step_closure *dsc, int link,
6214 unsigned int cond, unsigned int rm)
6215 {
6216 /* Implement {BX,BLX}<cond> <reg>" as:
6217
6218 Preparation: cond <- instruction condition
6219 Insn: mov r0, r0 (nop)
6220 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6221
6222 Don't set r14 in cleanup for BX. */
6223
6224 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6225
6226 dsc->u.branch.cond = cond;
6227 dsc->u.branch.link = link;
6228
6229 dsc->u.branch.exchange = 1;
6230
6231 dsc->cleanup = &cleanup_branch;
6232 }
6233
6234 static int
6235 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6236 struct regcache *regs, struct displaced_step_closure *dsc)
6237 {
6238 unsigned int cond = bits (insn, 28, 31);
6239 /* BX: x12xxx1x
6240 BLX: x12xxx3x. */
6241 int link = bit (insn, 5);
6242 unsigned int rm = bits (insn, 0, 3);
6243
6244 if (debug_displaced)
6245 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6246 (unsigned long) insn);
6247
6248 dsc->modinsn[0] = ARM_NOP;
6249
6250 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6251 return 0;
6252 }
6253
6254 static int
6255 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6256 struct regcache *regs,
6257 struct displaced_step_closure *dsc)
6258 {
6259 int link = bit (insn, 7);
6260 unsigned int rm = bits (insn, 3, 6);
6261
6262 if (debug_displaced)
6263 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6264 (unsigned short) insn);
6265
6266 dsc->modinsn[0] = THUMB_NOP;
6267
6268 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6269
6270 return 0;
6271 }
6272
6273
6274 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6275
6276 static void
6277 cleanup_alu_imm (struct gdbarch *gdbarch,
6278 struct regcache *regs, struct displaced_step_closure *dsc)
6279 {
6280 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6281 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6282 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6283 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6284 }
6285
/* Copy an ARM data-processing instruction with an immediate operand
   for displaced stepping.  Instructions that do not mention the PC are
   copied unmodified; otherwise r0/r1 stand in for Rd/Rn, and
   cleanup_alu_imm moves the result back and restores the scratch
   registers.  Returns 0 on success.  */

static int
arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
		  struct displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rd = bits (insn, 12, 15);
  unsigned int op = bits (insn, 21, 24);
  int is_mov = (op == 0xd);	/* MOV has no Rn operand.  */
  ULONGEST rd_val, rn_val;

  /* Bits 12-19 hold Rd and Rn; nothing to do unless one of them is PC.  */
  if (!insn_references_pc (insn, 0x000ff000ul))
    return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
			"%.8lx\n", is_mov ? "move" : "ALU",
			(unsigned long) insn);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  /* Save scratch registers before overwriting them; displaced_read_reg
     of the PC yields the adjusted (original-insn-relative) value.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Clear Rd/Rn fields; Rd becomes r0, and for non-MOV ops set Rn = r1.  */
  if (is_mov)
    dsc->modinsn[0] = insn & 0xfff00fff;
  else
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
6333
/* Copy a 32-bit Thumb-2 ALU instruction with an immediate operand.
   Per the assertion below, only MOV (op == 0x2 with Rn == 0xf) reaches
   this routine.  If neither Rm nor Rd is the PC the insn is copied
   unmodified; otherwise r0/r1 stand in for Rd/Rn and cleanup_alu_imm
   fixes things up afterwards.  Returns 0 on success.  */

static int
thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
		     uint16_t insn2, struct regcache *regs,
		     struct displaced_step_closure *dsc)
{
  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn, rm, rd;
  ULONGEST rd_val, rn_val;

  rn = bits (insn1, 0, 3);	/* Rn */
  rm = bits (insn2, 0, 3);	/* Rm */
  rd = bits (insn2, 8, 11);	/* Rd */

  /* This routine is only called for instruction MOV.  */
  gdb_assert (op == 0x2 && rn == 0xf);

  if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
			"ALU", insn1, insn2);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  /* Save scratch registers before overwriting them; rn is 0xf here, so
     the r1 stand-in receives the adjusted PC value.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Keep the first halfword; in the second, clear Rd and substitute
     low registers (Rd = r0, Rm = r1) for the operands.  */
  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
6385
6386 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6387
6388 static void
6389 cleanup_alu_reg (struct gdbarch *gdbarch,
6390 struct regcache *regs, struct displaced_step_closure *dsc)
6391 {
6392 ULONGEST rd_val;
6393 int i;
6394
6395 rd_val = displaced_read_reg (regs, dsc, 0);
6396
6397 for (i = 0; i < 3; i++)
6398 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6399
6400 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6401 }
6402
6403 static void
6404 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6405 struct displaced_step_closure *dsc,
6406 unsigned int rd, unsigned int rn, unsigned int rm)
6407 {
6408 ULONGEST rd_val, rn_val, rm_val;
6409
6410 /* Instruction is of form:
6411
6412 <op><cond> rd, [rn,] rm [, <shift>]
6413
6414 Rewrite as:
6415
6416 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6417 r0, r1, r2 <- rd, rn, rm
6418 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
6419 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6420 */
6421
6422 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6423 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6424 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6425 rd_val = displaced_read_reg (regs, dsc, rd);
6426 rn_val = displaced_read_reg (regs, dsc, rn);
6427 rm_val = displaced_read_reg (regs, dsc, rm);
6428 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6429 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6430 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6431 dsc->rd = rd;
6432
6433 dsc->cleanup = &cleanup_alu_reg;
6434 }
6435
6436 static int
6437 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6438 struct displaced_step_closure *dsc)
6439 {
6440 unsigned int op = bits (insn, 21, 24);
6441 int is_mov = (op == 0xd);
6442
6443 if (!insn_references_pc (insn, 0x000ff00ful))
6444 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6445
6446 if (debug_displaced)
6447 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6448 is_mov ? "move" : "ALU", (unsigned long) insn);
6449
6450 if (is_mov)
6451 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6452 else
6453 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6454
6455 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6456 bits (insn, 0, 3));
6457 return 0;
6458 }
6459
6460 static int
6461 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6462 struct regcache *regs,
6463 struct displaced_step_closure *dsc)
6464 {
6465 unsigned rm, rd;
6466
6467 rm = bits (insn, 3, 6);
6468 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6469
6470 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
6471 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6472
6473 if (debug_displaced)
6474 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
6475 (unsigned short) insn);
6476
6477 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
6478
6479 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
6480
6481 return 0;
6482 }
6483
6484 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6485
6486 static void
6487 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6488 struct regcache *regs,
6489 struct displaced_step_closure *dsc)
6490 {
6491 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6492 int i;
6493
6494 for (i = 0; i < 4; i++)
6495 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6496
6497 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6498 }
6499
6500 static void
6501 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6502 struct displaced_step_closure *dsc,
6503 unsigned int rd, unsigned int rn, unsigned int rm,
6504 unsigned rs)
6505 {
6506 int i;
6507 ULONGEST rd_val, rn_val, rm_val, rs_val;
6508
6509 /* Instruction is of form:
6510
6511 <op><cond> rd, [rn,] rm, <shift> rs
6512
6513 Rewrite as:
6514
6515 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6516 r0, r1, r2, r3 <- rd, rn, rm, rs
6517 Insn: <op><cond> r0, r1, r2, <shift> r3
6518 Cleanup: tmp5 <- r0
6519 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6520 rd <- tmp5
6521 */
6522
6523 for (i = 0; i < 4; i++)
6524 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6525
6526 rd_val = displaced_read_reg (regs, dsc, rd);
6527 rn_val = displaced_read_reg (regs, dsc, rn);
6528 rm_val = displaced_read_reg (regs, dsc, rm);
6529 rs_val = displaced_read_reg (regs, dsc, rs);
6530 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6531 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6532 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6533 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6534 dsc->rd = rd;
6535 dsc->cleanup = &cleanup_alu_shifted_reg;
6536 }
6537
6538 static int
6539 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6540 struct regcache *regs,
6541 struct displaced_step_closure *dsc)
6542 {
6543 unsigned int op = bits (insn, 21, 24);
6544 int is_mov = (op == 0xd);
6545 unsigned int rd, rn, rm, rs;
6546
6547 if (!insn_references_pc (insn, 0x000fff0ful))
6548 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6549
6550 if (debug_displaced)
6551 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6552 "%.8lx\n", is_mov ? "move" : "ALU",
6553 (unsigned long) insn);
6554
6555 rn = bits (insn, 16, 19);
6556 rm = bits (insn, 0, 3);
6557 rs = bits (insn, 8, 11);
6558 rd = bits (insn, 12, 15);
6559
6560 if (is_mov)
6561 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6562 else
6563 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6564
6565 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6566
6567 return 0;
6568 }
6569
/* Clean up load instructions.  Runs after the modified load executed
   out of line: r0 (and r1 for an 8-byte transfer) holds the loaded
   data and r2 holds the possibly-updated base register.  Restores the
   scratch registers and moves the result into the real destination.  */

static void
cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
	      struct displaced_step_closure *dsc)
{
  ULONGEST rt_val, rt_val2 = 0, rn_val;

  /* Pick up the results before restoring the scratch registers.  */
  rt_val = displaced_read_reg (regs, dsc, 0);
  if (dsc->u.ldst.xfersize == 8)
    rt_val2 = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, 2);

  /* Restore the scratch registers used by the rewritten insn.  */
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);

  /* Handle register writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
  /* Put result in right place.  LOAD_WRITE_PC makes a load into PC act
     as a branch (with interworking, where the architecture does so).  */
  displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
  if (dsc->u.ldst.xfersize == 8)
    displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
}
6598
6599 /* Clean up store instructions. */
6600
6601 static void
6602 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6603 struct displaced_step_closure *dsc)
6604 {
6605 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6606
6607 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6608 if (dsc->u.ldst.xfersize > 4)
6609 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6610 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6611 if (!dsc->u.ldst.immed)
6612 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6613 if (!dsc->u.ldst.restore_r4)
6614 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6615
6616 /* Writeback. */
6617 if (dsc->u.ldst.writeback)
6618 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6619 }
6620
6621 /* Copy "extra" load/store instructions. These are halfword/doubleword
6622 transfers, which have a different encoding to byte/word transfers. */
6623
6624 static int
6625 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
6626 struct regcache *regs, struct displaced_step_closure *dsc)
6627 {
6628 unsigned int op1 = bits (insn, 20, 24);
6629 unsigned int op2 = bits (insn, 5, 6);
6630 unsigned int rt = bits (insn, 12, 15);
6631 unsigned int rn = bits (insn, 16, 19);
6632 unsigned int rm = bits (insn, 0, 3);
6633 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6634 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6635 int immed = (op1 & 0x4) != 0;
6636 int opcode;
6637 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6638
6639 if (!insn_references_pc (insn, 0x000ff00ful))
6640 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6641
6642 if (debug_displaced)
6643 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6644 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6645 (unsigned long) insn);
6646
6647 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6648
6649 if (opcode < 0)
6650 internal_error (__FILE__, __LINE__,
6651 _("copy_extra_ld_st: instruction decode error"));
6652
6653 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6654 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6655 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6656 if (!immed)
6657 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6658
6659 rt_val = displaced_read_reg (regs, dsc, rt);
6660 if (bytesize[opcode] == 8)
6661 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6662 rn_val = displaced_read_reg (regs, dsc, rn);
6663 if (!immed)
6664 rm_val = displaced_read_reg (regs, dsc, rm);
6665
6666 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6667 if (bytesize[opcode] == 8)
6668 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6669 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6670 if (!immed)
6671 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6672
6673 dsc->rd = rt;
6674 dsc->u.ldst.xfersize = bytesize[opcode];
6675 dsc->u.ldst.rn = rn;
6676 dsc->u.ldst.immed = immed;
6677 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6678 dsc->u.ldst.restore_r4 = 0;
6679
6680 if (immed)
6681 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6682 ->
6683 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6684 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6685 else
6686 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6687 ->
6688 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6689 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6690
6691 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6692
6693 return 0;
6694 }
6695
/* Copy byte/half word/word loads and stores.  */

/* Set up a single-register load or store (LOAD selects direction) of
   SIZE bytes for displaced stepping: Rt/Rn/Rm are parked in r0/r2/r3
   so the rewritten insn can run out of line, and the matching cleanup
   routine is installed.  RM is only meaningful when IMMED is zero.
   NOTE(review): USERMODE is accepted but not referenced anywhere in
   this function.  NOTE(review): tmp[1] is never saved here, so the
   cleanup paths that restore r1 for xfersize > 4 presumably never
   apply to insns installed through this routine — confirm callers only
   pass SIZE <= 4.  */

static void
install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
		    struct displaced_step_closure *dsc, int load,
		    int immed, int writeback, int size, int usermode,
		    int rt, int rm, int rn)
{
  ULONGEST rt_val, rn_val, rm_val = 0;

  /* Save the scratch registers the rewritten insn will clobber (r4 is
     used only by the PC-store sequence below, hence only for stores).  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  if (!load)
    dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);

  rt_val = displaced_read_reg (regs, dsc, rt);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  /* Park the operand values in the stand-in registers.  */
  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rt;
  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = writeback;

  /* To write PC we can do:

     Before this sequence of instructions:
     r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
     r2 is the Rn value got from dispalced_read_reg.

     Insn1: push {pc} Write address of STR instruction + offset on stack
     Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
     Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
				= addr(Insn1) + offset - addr(Insn3) - 8
				= offset - 16
     Insn4: add r4, r4, #8 r4 = offset - 8
     Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
				= from + offset
     Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])

     Otherwise we don't know what value to write for PC, since the offset is
     architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
     of this can be found in Section "Saving from r15" in
     http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;
}
6751
6752
/* Copy a 32-bit Thumb-2 PC-relative literal load (LDR Rt, [PC, #+/-imm12])
   for displaced stepping.  The PC-relative access is rewritten as a
   register-offset load through r2 (word-aligned PC) and r3 (signed
   offset).  SIZE is the transfer size in bytes.  Returns 0.  */

static int
thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  struct displaced_step_closure *dsc, int size)
{
  unsigned int u_bit = bit (insn1, 7);	/* 1 = add offset, 0 = subtract.  */
  unsigned int rt = bits (insn2, 12, 15);
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
			imm12);

  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction LDR Rt imm12 into:

     Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12

     LDR R0, R2, R3,

     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3].  */


  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  /* Literal loads use the word-aligned PC as base.  */
  pc_val = pc_val & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);

  dsc->rd = rt;

  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  /* LDR R0, R2, R3 */
  dsc->modinsn[0] = 0xf852;
  dsc->modinsn[1] = 0x3;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_load;

  return 0;
}
6808
/* Copy a 32-bit Thumb-2 single-register load (register or immediate
   offset, per IMMED) for displaced stepping.  Loads not involving the
   PC are copied unmodified; otherwise Rt/Rn/Rm are parked in r0/r2/r3
   via install_load_store.  Returns 0.  */

static int
thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  struct displaced_step_closure *dsc,
			  int writeback, int immed)
{
  unsigned int rt = bits (insn2, 12, 15);
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */
  /* In LDR (register), there is also a register Rm, which is not allowed to
     be PC, so we don't have to check it.  */

  if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
					dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
			rt, rn, insn1, insn2);

  install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
		      0, rt, rm, rn);

  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* ldr[b]<cond> rt, [rn, #imm], etc.
       ->
       ldr[b]<cond> r0, [r2, #imm].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = insn2 & 0x0fff;
    }
  else
    /* ldr[b]<cond> rt, [rn, rm], etc.
       ->
       ldr[b]<cond> r0, [r2, r3].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
    }

  dsc->numinsns = 2;

  return 0;
}
6856
6857
/* Copy an ARM single-register load or store (LDR/STR/LDRB/STRB and the
   unprivileged "t" forms, per LOAD/SIZE/USERMODE) for displaced
   stepping.  Insns not mentioning the PC are copied unmodified.  A
   store of the PC needs a multi-insn sequence to materialize the
   correct stored value (see install_load_store's comment).  Returns 0.  */

static int
arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
			    struct regcache *regs,
			    struct displaced_step_closure *dsc,
			    int load, int size, int usermode)
{
  int immed = !bit (insn, 25);
  /* Writeback for post-indexed (P == 0) or pre-indexed with W set.  */
  int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */

  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying %s%s r%d [r%d] insn %.8lx\n",
			load ? (size == 1 ? "ldrb" : "ldr")
			     : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
			rt, rn,
			(unsigned long) insn);

  install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
		      usermode, rt, rm, rn);

  if (load || rt != ARM_PC_REGNUM)
    {
      dsc->u.ldst.restore_r4 = 0;

      if (immed)
	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
	dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
      else
	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, r3].  */
	dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
    }
  else
    {
      /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
      dsc->u.ldst.restore_r4 = 1;
      /* Compute the architecture-dependent stored-PC offset at run time,
	 then store from + offset (see install_load_store's comment).  */
      dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
      dsc->modinsn[1] = 0xe8bd0010;  /* pop  {r4} */
      dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
      dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
      dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */

      /* As above.  */
      if (immed)
	dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
      else
	dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;

      dsc->numinsns = 6;
    }

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;

  return 0;
}
6922
/* Cleanup LDM instructions with fully-populated register list.  This is an
   unfortunate corner case: it's impossible to implement correctly by modifying
   the instruction.  The issue is as follows: we have an instruction,

   ldm rN, {r0-r15}

   which we must rewrite to avoid loading PC.  A possible solution would be to
   do the load in two halves, something like (with suitable cleanup
   afterwards):

   mov r8, rN
   ldm[id][ab] r8!, {r0-r7}
   str r7, <temp>
   ldm[id][ab] r8, {r7-r14}
   <bkpt>

   but at present there's no suitable place for <temp>, since the scratch space
   is overwritten before the cleanup routine is called.  For now, we simply
   emulate the instruction.  */

static void
cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  int inc = dsc->u.block.increment;
  /* Address bumps replicating the LDM[ID][AB] addressing mode: +/-4
     either before or after each transfer.  */
  int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
  int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
  uint32_t regmask = dsc->u.block.regmask;
  /* Increment forms walk the list from r0 up; decrement forms from r15
     down.  */
  int regno = inc ? 0 : 15;
  CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
  int exception_return = dsc->u.block.load && dsc->u.block.user
			 && (regmask & 0x8000) != 0;
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int do_transfer = condition_true (dsc->u.block.cond, status);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (!do_transfer)
    return;

  /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
     sensible we can do here.  Complain loudly.  */
  if (exception_return)
    error (_("Cannot single-step exception return"));

  /* We don't handle any stores here for now.  */
  gdb_assert (dsc->u.block.load != 0);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
			"%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
			dsc->u.block.increment ? "inc" : "dec",
			dsc->u.block.before ? "before" : "after");

  /* Emulate the load one register at a time, in list order.  */
  while (regmask)
    {
      uint32_t memword;

      /* Advance to the next register present in the mask.  */
      if (inc)
	while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
	  regno++;
      else
	while (regno >= 0 && (regmask & (1 << regno)) == 0)
	  regno--;

      xfer_addr += bump_before;

      memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
      displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);

      xfer_addr += bump_after;

      regmask &= ~(1 << regno);
    }

  /* Emulate base-register writeback.  */
  if (dsc->u.block.writeback)
    displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
			 CANNOT_WRITE_PC);
}
7001
7002 /* Clean up an STM which included the PC in the register list. */
7003
7004 static void
7005 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
7006 struct displaced_step_closure *dsc)
7007 {
7008 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7009 int store_executed = condition_true (dsc->u.block.cond, status);
7010 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
7011 CORE_ADDR stm_insn_addr;
7012 uint32_t pc_val;
7013 long offset;
7014 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7015
7016 /* If condition code fails, there's nothing else to do. */
7017 if (!store_executed)
7018 return;
7019
7020 if (dsc->u.block.increment)
7021 {
7022 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
7023
7024 if (dsc->u.block.before)
7025 pc_stored_at += 4;
7026 }
7027 else
7028 {
7029 pc_stored_at = dsc->u.block.xfer_addr;
7030
7031 if (dsc->u.block.before)
7032 pc_stored_at -= 4;
7033 }
7034
7035 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
7036 stm_insn_addr = dsc->scratch_base;
7037 offset = pc_val - stm_insn_addr;
7038
7039 if (debug_displaced)
7040 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
7041 "STM instruction\n", offset);
7042
7043 /* Rewrite the stored PC to the proper value for the non-displaced original
7044 instruction. */
7045 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
7046 dsc->insn_addr + offset);
7047 }
7048
/* Clean up an LDM which includes the PC in the register list.  We clumped all
   the registers in the transferred list into a contiguous range r0...rX (to
   avoid loading PC directly and losing control of the debugged program), so we
   must undo that here.  */

static void
cleanup_block_load_pc (struct gdbarch *gdbarch,
		       struct regcache *regs,
		       struct displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int load_executed = condition_true (dsc->u.block.cond, status);
  unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
  unsigned int regs_loaded = bitcount (mask);
  unsigned int num_to_shuffle = regs_loaded, clobbered;

  /* The method employed here will fail if the register list is fully populated
     (we need to avoid loading PC directly).  */
  gdb_assert (num_to_shuffle < 16);

  if (!load_executed)
    return;

  /* The modified insn loaded into r0..r(N-1); all of those are
     initially "clobbered" until restored or claimed below.  */
  clobbered = (1 << num_to_shuffle) - 1;

  /* Walk the original register list from r15 down; the k-th highest
     listed register received the k-th highest loaded value, which now
     sits in r(num_to_shuffle - 1).  */
  while (num_to_shuffle > 0)
    {
      if ((mask & (1 << write_reg)) != 0)
	{
	  unsigned int read_reg = num_to_shuffle - 1;

	  if (read_reg != write_reg)
	    {
	      ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
	      displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
	      if (debug_displaced)
		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
				    "loaded register r%d to r%d\n"), read_reg,
				    write_reg);
	    }
	  else if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
				"r%d already in the right place\n"),
				write_reg);

	  /* This register now holds its intended value, not scratch.  */
	  clobbered &= ~(1 << write_reg);

	  num_to_shuffle--;
	}

      write_reg--;
    }

  /* Restore any registers we scribbled over.  */
  for (write_reg = 0; clobbered != 0; write_reg++)
    {
      if ((clobbered & (1 << write_reg)) != 0)
	{
	  displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
			       CANNOT_WRITE_PC);
	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
				"clobbered register r%d\n"), write_reg);
	  clobbered &= ~(1 << write_reg);
	}
    }

  /* Perform register writeback manually.  */
  if (dsc->u.block.writeback)
    {
      ULONGEST new_rn_val = dsc->u.block.xfer_addr;

      if (dsc->u.block.increment)
	new_rn_val += regs_loaded * 4;
      else
	new_rn_val -= regs_loaded * 4;

      displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
			   CANNOT_WRITE_PC);
    }
}
7130
7131 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7132 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7133
7134 static int
7135 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7136 struct regcache *regs,
7137 struct displaced_step_closure *dsc)
7138 {
7139 int load = bit (insn, 20);
7140 int user = bit (insn, 22);
7141 int increment = bit (insn, 23);
7142 int before = bit (insn, 24);
7143 int writeback = bit (insn, 21);
7144 int rn = bits (insn, 16, 19);
7145
7146 /* Block transfers which don't mention PC can be run directly
7147 out-of-line. */
7148 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7149 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7150
7151 if (rn == ARM_PC_REGNUM)
7152 {
7153 warning (_("displaced: Unpredictable LDM or STM with "
7154 "base register r15"));
7155 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7156 }
7157
7158 if (debug_displaced)
7159 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7160 "%.8lx\n", (unsigned long) insn);
7161
7162 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7163 dsc->u.block.rn = rn;
7164
7165 dsc->u.block.load = load;
7166 dsc->u.block.user = user;
7167 dsc->u.block.increment = increment;
7168 dsc->u.block.before = before;
7169 dsc->u.block.writeback = writeback;
7170 dsc->u.block.cond = bits (insn, 28, 31);
7171
7172 dsc->u.block.regmask = insn & 0xffff;
7173
7174 if (load)
7175 {
7176 if ((insn & 0xffff) == 0xffff)
7177 {
7178 /* LDM with a fully-populated register list. This case is
7179 particularly tricky. Implement for now by fully emulating the
7180 instruction (which might not behave perfectly in all cases, but
7181 these instructions should be rare enough for that not to matter
7182 too much). */
7183 dsc->modinsn[0] = ARM_NOP;
7184
7185 dsc->cleanup = &cleanup_block_load_all;
7186 }
7187 else
7188 {
7189 /* LDM of a list of registers which includes PC. Implement by
7190 rewriting the list of registers to be transferred into a
7191 contiguous chunk r0...rX before doing the transfer, then shuffling
7192 registers into the correct places in the cleanup routine. */
7193 unsigned int regmask = insn & 0xffff;
7194 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7195 unsigned int to = 0, from = 0, i, new_rn;
7196
7197 for (i = 0; i < num_in_list; i++)
7198 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7199
7200 /* Writeback makes things complicated. We need to avoid clobbering
7201 the base register with one of the registers in our modified
7202 register list, but just using a different register can't work in
7203 all cases, e.g.:
7204
7205 ldm r14!, {r0-r13,pc}
7206
7207 which would need to be rewritten as:
7208
7209 ldm rN!, {r0-r14}
7210
7211 but that can't work, because there's no free register for N.
7212
7213 Solve this by turning off the writeback bit, and emulating
7214 writeback manually in the cleanup routine. */
7215
7216 if (writeback)
7217 insn &= ~(1 << 21);
7218
7219 new_regmask = (1 << num_in_list) - 1;
7220
7221 if (debug_displaced)
7222 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7223 "{..., pc}: original reg list %.4x, modified "
7224 "list %.4x\n"), rn, writeback ? "!" : "",
7225 (int) insn & 0xffff, new_regmask);
7226
7227 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7228
7229 dsc->cleanup = &cleanup_block_load_pc;
7230 }
7231 }
7232 else
7233 {
7234 /* STM of a list of registers which includes PC. Run the instruction
7235 as-is, but out of line: this will store the wrong value for the PC,
7236 so we must manually fix up the memory in the cleanup routine.
7237 Doing things this way has the advantage that we can auto-detect
7238 the offset of the PC write (which is architecture-dependent) in
7239 the cleanup routine. */
7240 dsc->modinsn[0] = insn;
7241
7242 dsc->cleanup = &cleanup_block_store_pc;
7243 }
7244
7245 return 0;
7246 }
7247
7248 static int
7249 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7250 struct regcache *regs,
7251 struct displaced_step_closure *dsc)
7252 {
7253 int rn = bits (insn1, 0, 3);
7254 int load = bit (insn1, 4);
7255 int writeback = bit (insn1, 5);
7256
7257 /* Block transfers which don't mention PC can be run directly
7258 out-of-line. */
7259 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7260 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7261
7262 if (rn == ARM_PC_REGNUM)
7263 {
7264 warning (_("displaced: Unpredictable LDM or STM with "
7265 "base register r15"));
7266 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7267 "unpredictable ldm/stm", dsc);
7268 }
7269
7270 if (debug_displaced)
7271 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7272 "%.4x%.4x\n", insn1, insn2);
7273
7274 /* Clear bit 13, since it should be always zero. */
7275 dsc->u.block.regmask = (insn2 & 0xdfff);
7276 dsc->u.block.rn = rn;
7277
7278 dsc->u.block.load = load;
7279 dsc->u.block.user = 0;
7280 dsc->u.block.increment = bit (insn1, 7);
7281 dsc->u.block.before = bit (insn1, 8);
7282 dsc->u.block.writeback = writeback;
7283 dsc->u.block.cond = INST_AL;
7284 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7285
7286 if (load)
7287 {
7288 if (dsc->u.block.regmask == 0xffff)
7289 {
7290 /* This branch is impossible to happen. */
7291 gdb_assert (0);
7292 }
7293 else
7294 {
7295 unsigned int regmask = dsc->u.block.regmask;
7296 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7297 unsigned int to = 0, from = 0, i, new_rn;
7298
7299 for (i = 0; i < num_in_list; i++)
7300 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7301
7302 if (writeback)
7303 insn1 &= ~(1 << 5);
7304
7305 new_regmask = (1 << num_in_list) - 1;
7306
7307 if (debug_displaced)
7308 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7309 "{..., pc}: original reg list %.4x, modified "
7310 "list %.4x\n"), rn, writeback ? "!" : "",
7311 (int) dsc->u.block.regmask, new_regmask);
7312
7313 dsc->modinsn[0] = insn1;
7314 dsc->modinsn[1] = (new_regmask & 0xffff);
7315 dsc->numinsns = 2;
7316
7317 dsc->cleanup = &cleanup_block_load_pc;
7318 }
7319 }
7320 else
7321 {
7322 dsc->modinsn[0] = insn1;
7323 dsc->modinsn[1] = insn2;
7324 dsc->numinsns = 2;
7325 dsc->cleanup = &cleanup_block_store_pc;
7326 }
7327 return 0;
7328 }
7329
7330 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7331 for Linux, where some SVC instructions must be treated specially. */
7332
7333 static void
7334 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7335 struct displaced_step_closure *dsc)
7336 {
7337 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7338
7339 if (debug_displaced)
7340 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7341 "%.8lx\n", (unsigned long) resume_addr);
7342
7343 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7344 }
7345
7346
7347 /* Common copy routine for svc instruciton. */
7348
7349 static int
7350 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7351 struct displaced_step_closure *dsc)
7352 {
7353 /* Preparation: none.
7354 Insn: unmodified svc.
7355 Cleanup: pc <- insn_addr + insn_size. */
7356
7357 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7358 instruction. */
7359 dsc->wrote_to_pc = 1;
7360
7361 /* Allow OS-specific code to override SVC handling. */
7362 if (dsc->u.svc.copy_svc_os)
7363 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7364 else
7365 {
7366 dsc->cleanup = &cleanup_svc;
7367 return 0;
7368 }
7369 }
7370
7371 static int
7372 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7373 struct regcache *regs, struct displaced_step_closure *dsc)
7374 {
7375
7376 if (debug_displaced)
7377 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7378 (unsigned long) insn);
7379
7380 dsc->modinsn[0] = insn;
7381
7382 return install_svc (gdbarch, regs, dsc);
7383 }
7384
7385 static int
7386 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7387 struct regcache *regs, struct displaced_step_closure *dsc)
7388 {
7389
7390 if (debug_displaced)
7391 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7392 insn);
7393
7394 dsc->modinsn[0] = insn;
7395
7396 return install_svc (gdbarch, regs, dsc);
7397 }
7398
7399 /* Copy undefined instructions. */
7400
7401 static int
7402 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7403 struct displaced_step_closure *dsc)
7404 {
7405 if (debug_displaced)
7406 fprintf_unfiltered (gdb_stdlog,
7407 "displaced: copying undefined insn %.8lx\n",
7408 (unsigned long) insn);
7409
7410 dsc->modinsn[0] = insn;
7411
7412 return 0;
7413 }
7414
7415 static int
7416 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7417 struct displaced_step_closure *dsc)
7418 {
7419
7420 if (debug_displaced)
7421 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7422 "%.4x %.4x\n", (unsigned short) insn1,
7423 (unsigned short) insn2);
7424
7425 dsc->modinsn[0] = insn1;
7426 dsc->modinsn[1] = insn2;
7427 dsc->numinsns = 2;
7428
7429 return 0;
7430 }
7431
7432 /* Copy unpredictable instructions. */
7433
7434 static int
7435 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7436 struct displaced_step_closure *dsc)
7437 {
7438 if (debug_displaced)
7439 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7440 "%.8lx\n", (unsigned long) insn);
7441
7442 dsc->modinsn[0] = insn;
7443
7444 return 0;
7445 }
7446
/* The decode_* functions are instruction decoding helpers.  They mostly follow
   the presentation in the ARM ARM.  */

/* Decode the miscellaneous/memory-hint/Advanced-SIMD part of the
   unconditional (0b1111 condition) instruction space and dispatch to the
   matching copy routine.  OP1 is insn[26:20], OP2 is insn[7:4], RN is the
   base register field insn[19:16].  Returns nonzero on failure.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      /* pld/pldw with literal (rn == pc) form is unpredictable.  */
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    switch (op2)
      {
      /* Barriers and clrex have no PC dependence; copy unmodified.  */
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
        /* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
7509
/* Decode the ARM unconditional (condition field 0b1111) instruction space
   and dispatch to the matching copy routine.  Returns nonzero on
   failure.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      /* Coprocessor stores; sub-decode on insn[23:21].  */
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	/* Coprocessor loads; whether rn == pc distinguishes the immediate
	   and literal forms.  */
	 int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
7592
/* Decode miscellaneous instructions in dp/misc encoding space.  */

/* OP2 is insn[6:4], OP is insn[22:21], OP1 is insn[19:16].  Returns
   nonzero on failure.  */

static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);
  unsigned int op1 = bits (insn, 16, 19);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
        /* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
        /* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* Fall through for other OP values: treated as undefined.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
7645
/* Decode the data-processing/miscellaneous instruction space (insn[27:26]
   == 0b00) and dispatch to the matching copy routine.  Bit 25 selects
   immediate (set) vs. register (clear) forms.  Returns nonzero on
   failure.  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unpriveleged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7691
7692 static int
7693 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7694 struct regcache *regs,
7695 struct displaced_step_closure *dsc)
7696 {
7697 int a = bit (insn, 25), b = bit (insn, 4);
7698 uint32_t op1 = bits (insn, 20, 24);
7699 int rn_f = bits (insn, 16, 19) == 0xf;
7700
7701 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7702 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7703 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7704 else if ((!a && (op1 & 0x17) == 0x02)
7705 || (a && (op1 & 0x17) == 0x02 && !b))
7706 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7707 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7708 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7709 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7710 else if ((!a && (op1 & 0x17) == 0x03)
7711 || (a && (op1 & 0x17) == 0x03 && !b))
7712 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7713 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7714 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7715 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7716 else if ((!a && (op1 & 0x17) == 0x06)
7717 || (a && (op1 & 0x17) == 0x06 && !b))
7718 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7719 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7720 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7721 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7722 else if ((!a && (op1 & 0x17) == 0x07)
7723 || (a && (op1 & 0x17) == 0x07 && !b))
7724 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7725
7726 /* Should be unreachable. */
7727 return 1;
7728 }
7729
/* Decode the media instruction space (parallel add/sub, pack/unpack,
   saturate, bit-field ops, usad8/usada8) on insn[24:20] and dispatch to
   the matching copy routine.  None of these instructions may use the PC,
   so they are copied unmodified.  Returns nonzero on failure.  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  struct displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
			      "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	 {
	   /* Rd == 0xf distinguishes usad8 from usada8.  */
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	 }
      else
	 return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	 {
	   /* Rn == 0xf distinguishes bfc from bfi.  */
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	 }
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable (opcodes not listed above fall through
     here).  */
  return 1;
}
7785
7786 static int
7787 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7788 struct regcache *regs,
7789 struct displaced_step_closure *dsc)
7790 {
7791 if (bit (insn, 25))
7792 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7793 else
7794 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7795 }
7796
/* Decode the VFP/Neon extension register load/store space on insn[24:20]
   and dispatch to the matching copy routine.  Returns nonzero on
   failure.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable (opcodes not listed above fall through
     here).  */
  return 1;
}
7827
7828 /* Decode shifted register instructions. */
7829
7830 static int
7831 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7832 uint16_t insn2, struct regcache *regs,
7833 struct displaced_step_closure *dsc)
7834 {
7835 /* PC is only allowed to be used in instruction MOV. */
7836
7837 unsigned int op = bits (insn1, 5, 8);
7838 unsigned int rn = bits (insn1, 0, 3);
7839
7840 if (op == 0x2 && rn == 0xf) /* MOV */
7841 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7842 else
7843 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7844 "dp (shift reg)", dsc);
7845 }
7846
7847
/* Decode extension register load/store.  Exactly the same as
   arm_decode_ext_reg_ld_st.  */

/* Dispatch on insn1[8:4]; only vldr needs special treatment (it can be
   PC-relative), everything else is copied unmodified.  Returns nonzero
   on failure.  */

static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2,  struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c: /* 01x00 */
    case 0x0a: case 0x0e: /* 01x10 */
    case 0x12: case 0x16: /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d: /* 01x01 */
    case 0x0b: case 0x0f: /* 01x11 */
    case 0x13: case 0x17: /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable (opcodes not listed above fall through
     here).  */
  return 1;
}
7886
7887 static int
7888 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7889 struct regcache *regs, struct displaced_step_closure *dsc)
7890 {
7891 unsigned int op1 = bits (insn, 20, 25);
7892 int op = bit (insn, 4);
7893 unsigned int coproc = bits (insn, 8, 11);
7894 unsigned int rn = bits (insn, 16, 19);
7895
7896 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7897 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7898 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7899 && (coproc & 0xe) != 0xa)
7900 /* stc/stc2. */
7901 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7902 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7903 && (coproc & 0xe) != 0xa)
7904 /* ldc/ldc2 imm/lit. */
7905 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7906 else if ((op1 & 0x3e) == 0x00)
7907 return arm_copy_undef (gdbarch, insn, dsc);
7908 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7909 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7910 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7911 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7912 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7913 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7914 else if ((op1 & 0x30) == 0x20 && !op)
7915 {
7916 if ((coproc & 0xe) == 0xa)
7917 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7918 else
7919 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7920 }
7921 else if ((op1 & 0x30) == 0x20 && op)
7922 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7923 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7924 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7925 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7926 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7927 else if ((op1 & 0x30) == 0x30)
7928 return arm_copy_svc (gdbarch, insn, regs, dsc);
7929 else
7930 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7931 }
7932
7933 static int
7934 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7935 uint16_t insn2, struct regcache *regs,
7936 struct displaced_step_closure *dsc)
7937 {
7938 unsigned int coproc = bits (insn2, 8, 11);
7939 unsigned int op1 = bits (insn1, 4, 9);
7940 unsigned int bit_5_8 = bits (insn1, 5, 8);
7941 unsigned int bit_9 = bit (insn1, 9);
7942 unsigned int bit_4 = bit (insn1, 4);
7943 unsigned int rn = bits (insn1, 0, 3);
7944
7945 if (bit_9 == 0)
7946 {
7947 if (bit_5_8 == 2)
7948 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7949 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7950 dsc);
7951 else if (bit_5_8 == 0) /* UNDEFINED. */
7952 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7953 else
7954 {
7955 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
7956 if ((coproc & 0xe) == 0xa)
7957 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7958 dsc);
7959 else /* coproc is not 101x. */
7960 {
7961 if (bit_4 == 0) /* STC/STC2. */
7962 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7963 "stc/stc2", dsc);
7964 else /* LDC/LDC2 {literal, immeidate}. */
7965 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7966 regs, dsc);
7967 }
7968 }
7969 }
7970 else
7971 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7972
7973 return 0;
7974 }
7975
7976 static void
7977 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7978 struct displaced_step_closure *dsc, int rd)
7979 {
7980 /* ADR Rd, #imm
7981
7982 Rewrite as:
7983
7984 Preparation: Rd <- PC
7985 Insn: ADD Rd, #imm
7986 Cleanup: Null.
7987 */
7988
7989 /* Rd <- PC */
7990 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7991 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7992 }
7993
7994 static int
7995 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7996 struct displaced_step_closure *dsc,
7997 int rd, unsigned int imm)
7998 {
7999
8000 /* Encoding T2: ADDS Rd, #imm */
8001 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
8002
8003 install_pc_relative (gdbarch, regs, dsc, rd);
8004
8005 return 0;
8006 }
8007
8008 static int
8009 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
8010 struct regcache *regs,
8011 struct displaced_step_closure *dsc)
8012 {
8013 unsigned int rd = bits (insn, 8, 10);
8014 unsigned int imm8 = bits (insn, 0, 7);
8015
8016 if (debug_displaced)
8017 fprintf_unfiltered (gdb_stdlog,
8018 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8019 rd, imm8, insn);
8020
8021 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8022 }
8023
/* Copy a 32-bit Thumb ADR (encodings T2/T3) by rewriting it as
   "SUB Rd, Rd, #imm" or "ADD Rd, Rd, #imm" respectively, with Rd
   pre-loaded with the PC by install_pc_relative.  */

static int
thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int rd = bits (insn2, 8, 11);
  /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
     extract raw immediate encoding rather than computing immediate.  When
     generating ADD or SUB instruction, we can simply perform OR operation to
     set immediate into ADD.  */
  unsigned int imm_3_8 = insn2 & 0x70ff;
  unsigned int imm_i = insn1 & 0x0400;  /* Clear all bits except bit 10.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
			rd, imm_i, imm_3_8, insn1, insn2);

  if (bit (insn1, 7))  /* ADR encoding T2 (the subtracting form).  */
    {
      /* Rewrite as SUB-immediate encoding T3: SUB Rd, Rd, #imm.  */
      dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  else  /* ADR encoding T3 (the adding form).  */
    {
      /* Rewrite as ADD-immediate encoding T3: ADD Rd, Rd, #imm.  */
      dsc->modinsn[0] = (0xf100 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  dsc->numinsns = 2;

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
8060
8061 static int
8062 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8063 struct regcache *regs,
8064 struct displaced_step_closure *dsc)
8065 {
8066 unsigned int rt = bits (insn1, 8, 10);
8067 unsigned int pc;
8068 int imm8 = (bits (insn1, 0, 7) << 2);
8069 CORE_ADDR from = dsc->insn_addr;
8070
8071 /* LDR Rd, #imm8
8072
8073 Rwrite as:
8074
8075 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8076
8077 Insn: LDR R0, [R2, R3];
8078 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8079
8080 if (debug_displaced)
8081 fprintf_unfiltered (gdb_stdlog,
8082 "displaced: copying thumb ldr r%d [pc #%d]\n"
8083 , rt, imm8);
8084
8085 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8086 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8087 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8088 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8089 /* The assembler calculates the required value of the offset from the
8090 Align(PC,4) value of this instruction to the label. */
8091 pc = pc & 0xfffffffc;
8092
8093 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8094 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8095
8096 dsc->rd = rt;
8097 dsc->u.ldst.xfersize = 4;
8098 dsc->u.ldst.rn = 0;
8099 dsc->u.ldst.immed = 0;
8100 dsc->u.ldst.writeback = 0;
8101 dsc->u.ldst.restore_r4 = 0;
8102
8103 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8104
8105 dsc->cleanup = &cleanup_load;
8106
8107 return 0;
8108 }
8109
8110 /* Copy Thumb cbnz/cbz insruction. */
8111
8112 static int
8113 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8114 struct regcache *regs,
8115 struct displaced_step_closure *dsc)
8116 {
8117 int non_zero = bit (insn1, 11);
8118 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8119 CORE_ADDR from = dsc->insn_addr;
8120 int rn = bits (insn1, 0, 2);
8121 int rn_val = displaced_read_reg (regs, dsc, rn);
8122
8123 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8124 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
8125 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
8126 condition is false, let it be, cleanup_branch will do nothing. */
8127 if (dsc->u.branch.cond)
8128 {
8129 dsc->u.branch.cond = INST_AL;
8130 dsc->u.branch.dest = from + 4 + imm5;
8131 }
8132 else
8133 dsc->u.branch.dest = from + 2;
8134
8135 dsc->u.branch.link = 0;
8136 dsc->u.branch.exchange = 0;
8137
8138 if (debug_displaced)
8139 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
8140 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
8141 rn, rn_val, insn1, dsc->u.branch.dest);
8142
8143 dsc->modinsn[0] = THUMB_NOP;
8144
8145 dsc->cleanup = &cleanup_branch;
8146 return 0;
8147 }
8148
8149 /* Copy Table Branch Byte/Halfword */
8150 static int
8151 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8152 uint16_t insn2, struct regcache *regs,
8153 struct displaced_step_closure *dsc)
8154 {
8155 ULONGEST rn_val, rm_val;
8156 int is_tbh = bit (insn2, 4);
8157 CORE_ADDR halfwords = 0;
8158 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8159
8160 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8161 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8162
8163 if (is_tbh)
8164 {
8165 gdb_byte buf[2];
8166
8167 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8168 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8169 }
8170 else
8171 {
8172 gdb_byte buf[1];
8173
8174 target_read_memory (rn_val + rm_val, buf, 1);
8175 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8176 }
8177
8178 if (debug_displaced)
8179 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
8180 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
8181 (unsigned int) rn_val, (unsigned int) rm_val,
8182 (unsigned int) halfwords);
8183
8184 dsc->u.branch.cond = INST_AL;
8185 dsc->u.branch.link = 0;
8186 dsc->u.branch.exchange = 0;
8187 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8188
8189 dsc->cleanup = &cleanup_branch;
8190
8191 return 0;
8192 }
8193
8194 static void
8195 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8196 struct displaced_step_closure *dsc)
8197 {
8198 /* PC <- r7 */
8199 int val = displaced_read_reg (regs, dsc, 7);
8200 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8201
8202 /* r7 <- r8 */
8203 val = displaced_read_reg (regs, dsc, 8);
8204 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8205
8206 /* r8 <- tmp[0] */
8207 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8208
8209 }
8210
8211 static int
8212 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8213 struct regcache *regs,
8214 struct displaced_step_closure *dsc)
8215 {
8216 dsc->u.block.regmask = insn1 & 0x00ff;
8217
8218 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8219 to :
8220
8221 (1) register list is full, that is, r0-r7 are used.
8222 Prepare: tmp[0] <- r8
8223
8224 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8225 MOV r8, r7; Move value of r7 to r8;
8226 POP {r7}; Store PC value into r7.
8227
8228 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8229
8230 (2) register list is not full, supposing there are N registers in
8231 register list (except PC, 0 <= N <= 7).
8232 Prepare: for each i, 0 - N, tmp[i] <- ri.
8233
8234 POP {r0, r1, ...., rN};
8235
8236 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8237 from tmp[] properly.
8238 */
8239 if (debug_displaced)
8240 fprintf_unfiltered (gdb_stdlog,
8241 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8242 dsc->u.block.regmask, insn1);
8243
8244 if (dsc->u.block.regmask == 0xff)
8245 {
8246 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8247
8248 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8249 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8250 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8251
8252 dsc->numinsns = 3;
8253 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8254 }
8255 else
8256 {
8257 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8258 unsigned int new_regmask, bit = 1;
8259 unsigned int to = 0, from = 0, i, new_rn;
8260
8261 for (i = 0; i < num_in_list + 1; i++)
8262 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8263
8264 new_regmask = (1 << (num_in_list + 1)) - 1;
8265
8266 if (debug_displaced)
8267 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8268 "{..., pc}: original reg list %.4x,"
8269 " modified list %.4x\n"),
8270 (int) dsc->u.block.regmask, new_regmask);
8271
8272 dsc->u.block.regmask |= 0x8000;
8273 dsc->u.block.writeback = 0;
8274 dsc->u.block.cond = INST_AL;
8275
8276 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8277
8278 dsc->cleanup = &cleanup_block_load_pc;
8279 }
8280
8281 return 0;
8282 }
8283
/* Decode the 16-bit Thumb instruction INSN1 and fill in DSC with the
   out-of-line copy (or emulation) needed to displaced-step it.
   Instructions whose behaviour does not depend on their address are
   copied unmodified; PC-relative and control-flow forms get dedicated
   handlers.  Calls internal_error if the instruction cannot be
   decoded.  */

static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    struct regcache *regs,
				    struct displaced_step_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions, dispatched on the top four opcode
     bits.  */
  switch (op_bit_12_15)
    {
      /* Shift (immediate), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					 "shift/add/sub/mov/cmp",
					 dsc);
      break;
    case 4:
      switch (op_bit_10_11)
	{
	case 0: /* Data-processing */
	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					     "data-processing",
					     dsc);
	  break;
	case 1: /* Special data instructions and branch and exchange.  */
	  {
	    unsigned short op = bits (insn1, 7, 9);
	    if (op == 6 || op == 7) /* BX or BLX */
	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
						 dsc);
	  }
	  break;
	default: /* LDR (literal), which is PC-relative.  */
	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
	}
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address (ADR).  */
	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
	switch (bits (insn1, 8, 11))
	  {
	  case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
	    break;
	  case 12: case 13: /* POP */
	    if (bit (insn1, 8)) /* PC is in register list.  */
	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
	    break;
	  case 15: /* If-Then, and hints */
	    if (bits (insn1, 0, 3))
	      /* If-Then makes up to four following instructions conditional.
		 IT instruction itself is not conditional, so handle it as a
		 common unmodified instruction.  */
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
						 dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
	    break;
	  default:
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
	  }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
	err = thumb_copy_b (gdbarch, insn1, dsc);
      else
	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
8386
/* Decode the "load byte/halfword/word and memory hints" group of
   32-bit Thumb-2 instructions (INSN1/INSN2) and dispatch to the
   appropriate copy routine.  Returns the copy routine's result, or the
   undefined-instruction handler's for unallocated encodings.  */

static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
				 uint16_t insn1, uint16_t insn2,
				 struct regcache *regs,
				 struct displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);
  int rn = bits (insn1, 0, 3);
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
	{
	  if (rn == 0xf)
	    /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"pli/pld", dsc);
	}
      else
	{
	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     1);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrb{reg, immediate}/ldrbt",
						dsc);
	}

      break;
    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "pld/unalloc memhint", dsc);
      else
	{
	  if (rn == 0xf)
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     2);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrh/ldrht", dsc);
	}
      break;
    case 2: /* Load word */
      {
	int insn2_bit_8_11 = bits (insn2, 8, 11);

	if (rn == 0xf)
	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
	else if (op1 == 0x1) /* Encoding T3 */
	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
					   0, 1);
	else /* op1 == 0x0 */
	  {
	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
	      /* LDR (immediate) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, bit (insn2, 8), 1);
	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						  "ldrt", dsc);
	    else
	      /* LDR (register) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, 0, 0);
	  }
	break;
      }
    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      break;
    }
  return 0;
}
8467
/* Decode the 32-bit Thumb-2 instruction INSN1:INSN2 and fill in DSC
   with the out-of-line copy (or emulation) needed to displaced-step
   it.  Dispatches on op1 (bits 11-12 of the first halfword) and the
   sub-group fields below it.  Calls internal_error if the instruction
   cannot be decoded.  */

static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    uint16_t insn2, struct regcache *regs,
				    struct displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      {
	switch (bits (insn1, 9, 10))
	  {
	  case 0:
	    if (bit (insn1, 6))
	      {
		/* Load/store {dual, exclusive}, table branch.  */
		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
		    && bits (insn2, 5, 7) == 0)
		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
						  dsc);
		else
		  /* PC is not allowed to use in load/store {dual, exclusive}
		     instructions.  */
		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						     "load/store dual/ex", dsc);
	      }
	    else /* load/store multiple */
	      {
		switch (bits (insn1, 7, 8))
		  {
		  case 0: case 3: /* SRS, RFE */
		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						       "srs/rfe", dsc);
		    break;
		  case 1: case 2: /* LDM/STM/PUSH/POP */
		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
		    break;
		  }
	      }
	    break;

	  case 1:
	    /* Data-processing (shift register).  */
	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
					      dsc);
	    break;
	  default: /* Coprocessor instructions.  */
	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	    break;
	  }
	break;
      }
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
	{
	  if (bit (insn2, 14)  /* BLX/BL */
	      || bit (insn2, 12) /* Unconditional branch */
	      || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
	  else
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "misc ctrl", dsc);
	}
      else
	{
	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
	    {
	      /* Note: OP here shadows the outer bit-15 OP; this one is
		 the 5-bit opcode field.  */
	      int op = bits (insn1, 4, 8);
	      int rn = bits (insn1, 0, 3);
	      if ((op == 0 || op == 0xa) && rn == 0xf)
		/* ADR / ADD|SUB (PC-relative): needs fixup.  */
		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
						    regs, dsc);
	      else
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "dp/pb", dsc);
	    }
	  else /* Data processing (modified immediate) */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "dp/mi", dsc);
	}
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
	{
	case 0:
	  if (bit (insn1, 4))
	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
						   regs, dsc);
	  else /* NEON Load/Store and Store single data item */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "neon elt/struct load/store",
					       dsc);
	  break;
	case 1: /* op1 = 3, bits (9, 10) == 1 */
	  switch (bits (insn1, 7, 8))
	    {
	    case 0: case 1: /* Data processing (register) */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "dp(reg)", dsc);
	      break;
	    case 2: /* Multiply and absolute difference */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "mul/mua/diff", dsc);
	      break;
	    case 3: /* Long multiply and divide */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "lmul/lmua", dsc);
	      break;
	    }
	  break;
	default: /* Coprocessor instructions */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	  break;
	}
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));

}
8595
8596 static void
8597 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8598 CORE_ADDR to, struct regcache *regs,
8599 struct displaced_step_closure *dsc)
8600 {
8601 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8602 uint16_t insn1
8603 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8604
8605 if (debug_displaced)
8606 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8607 "at %.8lx\n", insn1, (unsigned long) from);
8608
8609 dsc->is_thumb = 1;
8610 dsc->insn_size = thumb_insn_size (insn1);
8611 if (thumb_insn_size (insn1) == 4)
8612 {
8613 uint16_t insn2
8614 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8615 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8616 }
8617 else
8618 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8619 }
8620
8621 void
8622 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8623 CORE_ADDR to, struct regcache *regs,
8624 struct displaced_step_closure *dsc)
8625 {
8626 int err = 0;
8627 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8628 uint32_t insn;
8629
8630 /* Most displaced instructions use a 1-instruction scratch space, so set this
8631 here and override below if/when necessary. */
8632 dsc->numinsns = 1;
8633 dsc->insn_addr = from;
8634 dsc->scratch_base = to;
8635 dsc->cleanup = NULL;
8636 dsc->wrote_to_pc = 0;
8637
8638 if (!displaced_in_arm_mode (regs))
8639 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8640
8641 dsc->is_thumb = 0;
8642 dsc->insn_size = 4;
8643 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8644 if (debug_displaced)
8645 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8646 "at %.8lx\n", (unsigned long) insn,
8647 (unsigned long) from);
8648
8649 if ((insn & 0xf0000000) == 0xf0000000)
8650 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8651 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8652 {
8653 case 0x0: case 0x1: case 0x2: case 0x3:
8654 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8655 break;
8656
8657 case 0x4: case 0x5: case 0x6:
8658 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8659 break;
8660
8661 case 0x7:
8662 err = arm_decode_media (gdbarch, insn, dsc);
8663 break;
8664
8665 case 0x8: case 0x9: case 0xa: case 0xb:
8666 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8667 break;
8668
8669 case 0xc: case 0xd: case 0xe: case 0xf:
8670 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8671 break;
8672 }
8673
8674 if (err)
8675 internal_error (__FILE__, __LINE__,
8676 _("arm_process_displaced_insn: Instruction decode error"));
8677 }
8678
8679 /* Actually set up the scratch space for a displaced instruction. */
8680
8681 void
8682 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8683 CORE_ADDR to, struct displaced_step_closure *dsc)
8684 {
8685 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8686 unsigned int i, len, offset;
8687 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8688 int size = dsc->is_thumb? 2 : 4;
8689 const gdb_byte *bkp_insn;
8690
8691 offset = 0;
8692 /* Poke modified instruction(s). */
8693 for (i = 0; i < dsc->numinsns; i++)
8694 {
8695 if (debug_displaced)
8696 {
8697 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8698 if (size == 4)
8699 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8700 dsc->modinsn[i]);
8701 else if (size == 2)
8702 fprintf_unfiltered (gdb_stdlog, "%.4x",
8703 (unsigned short)dsc->modinsn[i]);
8704
8705 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8706 (unsigned long) to + offset);
8707
8708 }
8709 write_memory_unsigned_integer (to + offset, size,
8710 byte_order_for_code,
8711 dsc->modinsn[i]);
8712 offset += size;
8713 }
8714
8715 /* Choose the correct breakpoint instruction. */
8716 if (dsc->is_thumb)
8717 {
8718 bkp_insn = tdep->thumb_breakpoint;
8719 len = tdep->thumb_breakpoint_size;
8720 }
8721 else
8722 {
8723 bkp_insn = tdep->arm_breakpoint;
8724 len = tdep->arm_breakpoint_size;
8725 }
8726
8727 /* Put breakpoint afterwards. */
8728 write_memory (to + offset, bkp_insn, len);
8729
8730 if (debug_displaced)
8731 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8732 paddress (gdbarch, from), paddress (gdbarch, to));
8733 }
8734
8735 /* Entry point for copying an instruction into scratch space for displaced
8736 stepping. */
8737
8738 struct displaced_step_closure *
8739 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8740 CORE_ADDR from, CORE_ADDR to,
8741 struct regcache *regs)
8742 {
8743 struct displaced_step_closure *dsc = XNEW (struct displaced_step_closure);
8744
8745 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8746 arm_displaced_init_closure (gdbarch, from, to, dsc);
8747
8748 return dsc;
8749 }
8750
8751 /* Entry point for cleaning things up after a displaced instruction has been
8752 single-stepped. */
8753
8754 void
8755 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8756 struct displaced_step_closure *dsc,
8757 CORE_ADDR from, CORE_ADDR to,
8758 struct regcache *regs)
8759 {
8760 if (dsc->cleanup)
8761 dsc->cleanup (gdbarch, regs, dsc);
8762
8763 if (!dsc->wrote_to_pc)
8764 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8765 dsc->insn_addr + dsc->insn_size);
8766
8767 }
8768
8769 #include "bfd-in2.h"
8770 #include "libcoff.h"
8771
/* Disassembler hook: print the instruction at MEMADDR using INFO.
   When the address is in Thumb code, plant a fake Thumb COFF symbol in
   INFO->symbols so opcodes' ARM disassembler switches to Thumb
   decoding.  Returns the length consumed, as reported by
   print_insn_{big,little}_arm.  */

static int
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
{
  struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;

  if (arm_pc_is_thumb (gdbarch, memaddr))
    {
      /* One-time-initialized fake symbol machinery; the statics keep
	 the objects alive across calls since INFO only stores
	 pointers.  */
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;

      if (csym.native == NULL)
	{
	  /* Create a fake symbol vector containing a Thumb symbol.
	     This is solely so that the code in print_insn_little_arm()
	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
	     the presence of a Thumb symbol and switch to decoding
	     Thumb instructions.  */

	  fake_target.flavour = bfd_target_coff_flavour;
	  fake_bfd.xvec = &fake_target;
	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
	  csym.native = &ce;
	  csym.symbol.the_bfd = &fake_bfd;
	  csym.symbol.name = "fake";
	  asym = (asymbol *) & csym;
	}

      /* Strip the Thumb bit before handing the address to opcodes.  */
      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    }
  else
    info->symbols = NULL;

  if (info->endian == BFD_ENDIAN_BIG)
    return print_insn_big_arm (memaddr, info);
  else
    return print_insn_little_arm (memaddr, info);
}
8813
8814 /* The following define instruction sequences that will cause ARM
8815 cpu's to take an undefined instruction trap. These are used to
8816 signal a breakpoint to GDB.
8817
8818 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8819 modes. A different instruction is required for each mode. The ARM
8820 cpu's can also be big or little endian. Thus four different
8821 instructions are needed to support all cases.
8822
8823 Note: ARMv4 defines several new instructions that will take the
8824 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8825 not in fact add the new instructions. The new undefined
8826 instructions in ARMv4 are all instructions that had no defined
8827 behaviour in earlier chips. There is no guarantee that they will
   raise an exception, but may be treated as NOP's.  In practice, it
   may only be safe to rely on instructions matching:
8830
8831 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8832 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8833 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8834
   Even this may only be true if the condition predicate is true.  The
8836 following use a condition predicate of ALWAYS so it is always TRUE.
8837
8838 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8839 and NetBSD all use a software interrupt rather than an undefined
   instruction to force a trap.  This can be handled by the
8841 abi-specific code during establishment of the gdbarch vector. */
8842
/* Undefined-instruction encodings used as breakpoints, in both byte
   orders.  The two Thumb variants are identical because both bytes of
   the encoding happen to be equal.  */
#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
#define THUMB_BE_BREAKPOINT {0xbe,0xbe}

/* Default breakpoint byte sequences for each mode/endianness.  */
static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8852
8853 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8854 the program counter value to determine whether a 16-bit or 32-bit
8855 breakpoint should be used. It returns a pointer to a string of
8856 bytes that encode a breakpoint instruction, stores the length of
8857 the string to *lenptr, and adjusts the program counter (if
8858 necessary) to point to the actual memory location where the
8859 breakpoint should be inserted. */
8860
8861 static const unsigned char *
8862 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8863 {
8864 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8865 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8866
8867 if (arm_pc_is_thumb (gdbarch, *pcptr))
8868 {
8869 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8870
8871 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8872 check whether we are replacing a 32-bit instruction. */
8873 if (tdep->thumb2_breakpoint != NULL)
8874 {
8875 gdb_byte buf[2];
8876 if (target_read_memory (*pcptr, buf, 2) == 0)
8877 {
8878 unsigned short inst1;
8879 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8880 if (thumb_insn_size (inst1) == 4)
8881 {
8882 *lenptr = tdep->thumb2_breakpoint_size;
8883 return tdep->thumb2_breakpoint;
8884 }
8885 }
8886 }
8887
8888 *lenptr = tdep->thumb_breakpoint_size;
8889 return tdep->thumb_breakpoint;
8890 }
8891 else
8892 {
8893 *lenptr = tdep->arm_breakpoint_size;
8894 return tdep->arm_breakpoint;
8895 }
8896 }
8897
8898 static void
8899 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8900 int *kindptr)
8901 {
8902 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8903
8904 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8905 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8906 that this is not confused with a 32-bit ARM breakpoint. */
8907 *kindptr = 3;
8908 }
8909
/* Extract from an array REGBUF containing the (raw) register state a
   function return value of type TYPE, and copy that, in virtual
   format, into VALBUF.  The location of the value depends on TYPE and
   on the configured floating-point model.  */

static void
arm_extract_return_value (struct type *type, struct regcache *regs,
			  gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = get_regcache_arch (regs);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE_FLT == TYPE_CODE (type))
    {
      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  {
	    /* The value is in register F0 in internal format.  We need to
	       extract the raw value and then convert it to the desired
	       internal type.  */
	    bfd_byte tmpbuf[FP_REGISTER_SIZE];

	    regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
	    convert_from_extended (floatformat_from_type (type), tmpbuf,
				   valbuf, gdbarch_byte_order (gdbarch));
	  }
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Float returned in r0 (and r1 for doubles).  */
	  regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
				  valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_extract_return_value: "
			    "Floating point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_CODE (type) == TYPE_CODE_REF
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      /* If the type is a plain integer, then the access is
	 straight-forward.  Otherwise we have to play around a bit
	 more.  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      ULONGEST tmp;

      while (len > 0)
	{
	  /* By using store_unsigned_integer we avoid having to do
	     anything special for small big-endian values.  */
	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
	  store_unsigned_integer (valbuf,
				  (len > INT_REGISTER_SIZE
				   ? INT_REGISTER_SIZE : len),
				  byte_order, tmp);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  regcache_cooked_read (regs, regno++, tmpbuf);
	  memcpy (valbuf, tmpbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
9002
9003
/* Will a function return an aggregate type in memory or in a
   register?  Return 0 if an aggregate type can be returned in a
   register, 1 if it must be returned in memory.  */

static int
arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
{
  int nRc;
  enum type_code code;

  type = check_typedef (type);

  /* In the ARM ABI, "integer" like aggregate types are returned in
     registers.  For an aggregate type to be integer like, its size
     must be less than or equal to INT_REGISTER_SIZE and the
     offset of each addressable subfield must be zero.  Note that bit
     fields are not addressable, and all addressable subfields of
     unions always start at offset zero.

     This function is based on the behaviour of GCC 2.95.1.
     See: gcc/arm.c: arm_return_in_memory() for details.

     Note: All versions of GCC before GCC 2.95.2 do not set up the
     parameters correctly for a function returning the following
     structure: struct { float f;}; This should be returned in memory,
     not a register.  Richard Earnshaw sent me a patch, but I do not
     know of any way to detect if a function like the above has been
     compiled with the correct calling convention.  */

  /* All aggregate types that won't fit in a register must be returned
     in memory.  */
  if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
    {
      return 1;
    }

  /* The AAPCS says all aggregates not larger than a word are returned
     in a register.  */
  if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
    return 0;

  /* The only aggregate types that can be returned in a register are
     structs and unions.  Arrays must be returned in memory.  */
  code = TYPE_CODE (type);
  if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
    {
      return 1;
    }

  /* Assume all other aggregate types can be returned in a register.
     Run a check for structures, unions and arrays.  */
  nRc = 0;

  if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
    {
      int i;
      /* Need to check if this struct/union is "integer" like.  For
	 this to be true, its size must be less than or equal to
	 INT_REGISTER_SIZE and the offset of each addressable
	 subfield must be zero.  Note that bit fields are not
	 addressable, and unions always start at offset zero.  If any
	 of the subfields is a floating point type, the struct/union
	 cannot be an integer type.  */

      /* For each field in the object, check:
	 1) Is it FP? --> yes, nRc = 1;
	 2) Is it addressable (bitpos != 0) and
	 not packed (bitsize == 0)?
	 --> yes, nRc = 1
       */

      for (i = 0; i < TYPE_NFIELDS (type); i++)
	{
	  enum type_code field_type_code;
	  field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
								       i)));

	  /* Is it a floating point type field?  */
	  if (field_type_code == TYPE_CODE_FLT)
	    {
	      nRc = 1;
	      break;
	    }

	  /* If bitpos != 0, then we have to care about it.  */
	  if (TYPE_FIELD_BITPOS (type, i) != 0)
	    {
	      /* Bitfields are not addressable.  If the field bitsize is
		 zero, then the field is not packed.  Hence it cannot be
		 a bitfield or any other packed type.  */
	      if (TYPE_FIELD_BITSIZE (type, i) == 0)
		{
		  nRc = 1;
		  break;
		}
	    }
	}
    }

  return nRc;
}
9105
/* Write into appropriate registers a function return value of type
   TYPE, given in virtual format.  Mirror image of
   arm_extract_return_value: the destination registers depend on TYPE
   and on the configured floating-point model.  */

static void
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = get_regcache_arch (regs);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE (type) == TYPE_CODE_FLT)
    {
      gdb_byte buf[MAX_REGISTER_SIZE];

      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:

	  /* Convert to FPA extended format and place in F0.  */
	  convert_to_extended (floatformat_from_type (type), buf, valbuf,
			       gdbarch_byte_order (gdbarch));
	  regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Float stored in r0 (and r1 for doubles).  */
	  regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
				   valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_CODE (type) == TYPE_CODE_REF
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
	{
	  /* Values of one word or less are zero/sign-extended and
	     returned in r0.  */
	  bfd_byte tmpbuf[INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);

	  store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
	  regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
	}
      else
	{
	  /* Integral values greater than one word are stored in consecutive
	     registers starting with r0.  This will always be a multiple of
	     the regiser size.  */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;

	  while (len > 0)
	    {
	      regcache_cooked_write (regs, regno++, valbuf);
	      len -= INT_REGISTER_SIZE;
	      valbuf += INT_REGISTER_SIZE;
	    }
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  memcpy (tmpbuf, valbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  regcache_cooked_write (regs, regno++, tmpbuf);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
9199
9200
/* Handle function return values.

   Implements the gdbarch return_value hook: decide how a value of
   VALTYPE is returned by FUNCTION, and, when WRITEBUF/READBUF are
   non-NULL, actually store/fetch the value in REGCACHE.  */

static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
		  struct type *valtype, struct regcache *regcache,
		  gdb_byte *readbuf, const gdb_byte *writebuf)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  int vfp_base_count;

  /* When the function uses the VFP variant of the AAPCS and the type
     is a VFP call candidate (see arm_vfp_call_candidate), the value
     is returned in VFP registers: VFP_BASE_COUNT units of register
     class REG_CHAR ('s', 'd' or 'q').  */
  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
    {
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      int i;
      for (i = 0; i < vfp_base_count; i++)
	{
	  if (reg_char == 'q')
	    {
	      /* Quad registers are accessed through their two
		 component double registers.  */
	      if (writebuf)
		arm_neon_quad_write (gdbarch, regcache, i,
				     writebuf + i * unit_length);

	      if (readbuf)
		arm_neon_quad_read (gdbarch, regcache, i,
				    readbuf + i * unit_length);
	    }
	  else
	    {
	      char name_buf[4];
	      int regnum;

	      /* Look up "s<i>" or "d<i>" by name, since the pseudo
		 register numbering is target dependent.  */
	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						    strlen (name_buf));
	      if (writebuf)
		regcache_cooked_write (regcache, regnum,
				       writebuf + i * unit_length);
	      if (readbuf)
		regcache_cooked_read (regcache, regnum,
				      readbuf + i * unit_length);
	    }
	}
      return RETURN_VALUE_REGISTER_CONVENTION;
    }

  /* Aggregates may be returned in memory, depending on the ABI and
     on arm_return_in_memory's analysis of the type.  */
  if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
      || TYPE_CODE (valtype) == TYPE_CODE_UNION
      || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
    {
      if (tdep->struct_return == pcc_struct_return
	  || arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }

  /* AAPCS returns complex types longer than a register in memory.  */
  if (tdep->arm_abi != ARM_ABI_APCS
      && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
      && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
    return RETURN_VALUE_STRUCT_CONVENTION;

  /* Everything else is returned in core registers.  */
  if (writebuf)
    arm_store_return_value (valtype, regcache, writebuf);

  if (readbuf)
    arm_extract_return_value (valtype, regcache, readbuf);

  return RETURN_VALUE_REGISTER_CONVENTION;
}
9273
9274
9275 static int
9276 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9277 {
9278 struct gdbarch *gdbarch = get_frame_arch (frame);
9279 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9280 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9281 CORE_ADDR jb_addr;
9282 gdb_byte buf[INT_REGISTER_SIZE];
9283
9284 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9285
9286 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9287 INT_REGISTER_SIZE))
9288 return 0;
9289
9290 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9291 return 1;
9292 }
9293
9294 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9295 return the target PC. Otherwise return 0. */
9296
9297 CORE_ADDR
9298 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9299 {
9300 const char *name;
9301 int namelen;
9302 CORE_ADDR start_addr;
9303
9304 /* Find the starting address and name of the function containing the PC. */
9305 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9306 {
9307 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9308 check here. */
9309 start_addr = arm_skip_bx_reg (frame, pc);
9310 if (start_addr != 0)
9311 return start_addr;
9312
9313 return 0;
9314 }
9315
9316 /* If PC is in a Thumb call or return stub, return the address of the
9317 target PC, which is in a register. The thunk functions are called
9318 _call_via_xx, where x is the register name. The possible names
9319 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9320 functions, named __ARM_call_via_r[0-7]. */
9321 if (startswith (name, "_call_via_")
9322 || startswith (name, "__ARM_call_via_"))
9323 {
9324 /* Use the name suffix to determine which register contains the
9325 target PC. */
9326 static char *table[15] =
9327 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9328 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9329 };
9330 int regno;
9331 int offset = strlen (name) - 2;
9332
9333 for (regno = 0; regno <= 14; regno++)
9334 if (strcmp (&name[offset], table[regno]) == 0)
9335 return get_frame_register_unsigned (frame, regno);
9336 }
9337
9338 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9339 non-interworking calls to foo. We could decode the stubs
9340 to find the target but it's easier to use the symbol table. */
9341 namelen = strlen (name);
9342 if (name[0] == '_' && name[1] == '_'
9343 && ((namelen > 2 + strlen ("_from_thumb")
9344 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
9345 || (namelen > 2 + strlen ("_from_arm")
9346 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
9347 {
9348 char *target_name;
9349 int target_len = namelen - 2;
9350 struct bound_minimal_symbol minsym;
9351 struct objfile *objfile;
9352 struct obj_section *sec;
9353
9354 if (name[namelen - 1] == 'b')
9355 target_len -= strlen ("_from_thumb");
9356 else
9357 target_len -= strlen ("_from_arm");
9358
9359 target_name = (char *) alloca (target_len + 1);
9360 memcpy (target_name, name + 2, target_len);
9361 target_name[target_len] = '\0';
9362
9363 sec = find_pc_section (pc);
9364 objfile = (sec == NULL) ? NULL : sec->objfile;
9365 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9366 if (minsym.minsym != NULL)
9367 return BMSYMBOL_VALUE_ADDRESS (minsym);
9368 else
9369 return 0;
9370 }
9371
9372 return 0; /* not a stub */
9373 }
9374
9375 static void
9376 set_arm_command (char *args, int from_tty)
9377 {
9378 printf_unfiltered (_("\
9379 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9380 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9381 }
9382
/* Handle a bare "show arm" command: display the value of every
   "show arm" subcommand.  ARGS and FROM_TTY are passed through.  */

static void
show_arm_command (char *args, int from_tty)
{
  cmd_show_list (showarmcmdlist, from_tty, "");
}
9388
/* Re-select the current gdbarch after one of the "set arm ..."
   parameters (ABI, fp model, disassembly style) has changed, so the
   new setting takes effect immediately.  */

static void
arm_update_current_architecture (void)
{
  struct gdbarch_info info;

  /* If the current architecture is not ARM, we have nothing to do.  */
  if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
    return;

  /* Update the architecture.  An empty info record makes
     gdbarch_update_p re-derive everything from the current target
     and the (possibly changed) global settings.  */
  gdbarch_info_init (&info);

  if (!gdbarch_update_p (info))
    internal_error (__FILE__, __LINE__, _("could not update architecture"));
}
9404
9405 static void
9406 set_fp_model_sfunc (char *args, int from_tty,
9407 struct cmd_list_element *c)
9408 {
9409 int fp_model;
9410
9411 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9412 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9413 {
9414 arm_fp_model = (enum arm_float_model) fp_model;
9415 break;
9416 }
9417
9418 if (fp_model == ARM_FLOAT_LAST)
9419 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9420 current_fp_model);
9421
9422 arm_update_current_architecture ();
9423 }
9424
9425 static void
9426 show_fp_model (struct ui_file *file, int from_tty,
9427 struct cmd_list_element *c, const char *value)
9428 {
9429 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9430
9431 if (arm_fp_model == ARM_FLOAT_AUTO
9432 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9433 fprintf_filtered (file, _("\
9434 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9435 fp_model_strings[tdep->fp_model]);
9436 else
9437 fprintf_filtered (file, _("\
9438 The current ARM floating point model is \"%s\".\n"),
9439 fp_model_strings[arm_fp_model]);
9440 }
9441
9442 static void
9443 arm_set_abi (char *args, int from_tty,
9444 struct cmd_list_element *c)
9445 {
9446 int arm_abi;
9447
9448 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9449 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9450 {
9451 arm_abi_global = (enum arm_abi_kind) arm_abi;
9452 break;
9453 }
9454
9455 if (arm_abi == ARM_ABI_LAST)
9456 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9457 arm_abi_string);
9458
9459 arm_update_current_architecture ();
9460 }
9461
9462 static void
9463 arm_show_abi (struct ui_file *file, int from_tty,
9464 struct cmd_list_element *c, const char *value)
9465 {
9466 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9467
9468 if (arm_abi_global == ARM_ABI_AUTO
9469 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9470 fprintf_filtered (file, _("\
9471 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9472 arm_abi_strings[tdep->arm_abi]);
9473 else
9474 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9475 arm_abi_string);
9476 }
9477
/* Show-function for "show arm fallback-mode": report the execution
   mode (arm/thumb/auto) assumed when symbol information is not
   available.  */

static void
arm_show_fallback_mode (struct ui_file *file, int from_tty,
			struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(when symbols are unavailable) is \"%s\".\n"),
		    arm_fallback_mode_string);
}
9487
9488 static void
9489 arm_show_force_mode (struct ui_file *file, int from_tty,
9490 struct cmd_list_element *c, const char *value)
9491 {
9492 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9493
9494 fprintf_filtered (file,
9495 _("The current execution mode assumed "
9496 "(even when symbols are available) is \"%s\".\n"),
9497 arm_force_mode_string);
9498 }
9499
/* If the user changes the register disassembly style used for info
   register and other commands, we have to also switch the style used
   in opcodes for disassembly output.  This function is run in the "set
   arm disassembly" command, and does that.  ARGS, FROM_TTY and C are
   unused; the new style is read from the global disassembly_style.  */

static void
set_disassembly_style_sfunc (char *args, int from_tty,
			     struct cmd_list_element *c)
{
  set_disassembly_style ();
}
9511 \f
9512 /* Return the ARM register name corresponding to register I. */
9513 static const char *
9514 arm_register_name (struct gdbarch *gdbarch, int i)
9515 {
9516 const int num_regs = gdbarch_num_regs (gdbarch);
9517
9518 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9519 && i >= num_regs && i < num_regs + 32)
9520 {
9521 static const char *const vfp_pseudo_names[] = {
9522 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9523 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9524 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9525 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9526 };
9527
9528 return vfp_pseudo_names[i - num_regs];
9529 }
9530
9531 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9532 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9533 {
9534 static const char *const neon_pseudo_names[] = {
9535 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9536 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9537 };
9538
9539 return neon_pseudo_names[i - num_regs - 32];
9540 }
9541
9542 if (i >= ARRAY_SIZE (arm_register_names))
9543 /* These registers are only supported on targets which supply
9544 an XML description. */
9545 return "";
9546
9547 return arm_register_names[i];
9548 }
9549
9550 static void
9551 set_disassembly_style (void)
9552 {
9553 int current;
9554
9555 /* Find the style that the user wants. */
9556 for (current = 0; current < num_disassembly_options; current++)
9557 if (disassembly_style == valid_disassembly_styles[current])
9558 break;
9559 gdb_assert (current < num_disassembly_options);
9560
9561 /* Synchronize the disassembler. */
9562 set_arm_regname_option (current);
9563 }
9564
9565 /* Test whether the coff symbol specific value corresponds to a Thumb
9566 function. */
9567
9568 static int
9569 coff_sym_is_thumb (int val)
9570 {
9571 return (val == C_THUMBEXT
9572 || val == C_THUMBSTAT
9573 || val == C_THUMBEXTFUNC
9574 || val == C_THUMBSTATFUNC
9575 || val == C_THUMBLABEL);
9576 }
9577
/* arm_coff_make_msymbol_special()
   arm_elf_make_msymbol_special()

   These functions test whether the COFF or ELF symbol corresponds to
   an address in thumb code, and set a "special" bit in a minimal
   symbol to indicate that it does.  */

static void
arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
{
  /* The ELF symbol's st_target_internal field records the branch
     type; ST_BRANCH_TO_THUMB marks a Thumb entry point.  */
  if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
      == ST_BRANCH_TO_THUMB)
    MSYMBOL_SET_SPECIAL (msym);
}
9592
/* See the comment above arm_elf_make_msymbol_special: mark MSYM as
   Thumb when the COFF symbol value VAL denotes a Thumb symbol.  */

static void
arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
{
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
9599
/* Per-objfile data destructor: release the mapping-symbol vectors.
   ARG is the struct arm_per_objfile; the section_maps array and the
   struct itself live on the objfile obstack and need no explicit
   free, only the VECs allocated by VEC_safe_push/insert do.  */

static void
arm_objfile_data_free (struct objfile *objfile, void *arg)
{
  struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
  unsigned int i;

  for (i = 0; i < objfile->obfd->section_count; i++)
    VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
}
9609
/* Record an ARM ELF mapping symbol ($a, $t or $d) from OBJFILE's
   symbol table, so the mode of each code/data region can be looked up
   later.  SYM is the special symbol; other '$'-prefixed names are
   ignored.  Symbols are kept in per-section vectors sorted by
   value.  */

static void
arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
			   asymbol *sym)
{
  const char *name = bfd_asymbol_name (sym);
  struct arm_per_objfile *data;
  VEC(arm_mapping_symbol_s) **map_p;
  struct arm_mapping_symbol new_map_sym;

  gdb_assert (name[0] == '$');
  /* Only $a (ARM code), $t (Thumb code) and $d (data) are mapping
     symbols; skip anything else.  */
  if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
    return;

  /* Lazily create the per-objfile data on first use.  */
  data = (struct arm_per_objfile *) objfile_data (objfile,
						  arm_objfile_data_key);
  if (data == NULL)
    {
      data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
			     struct arm_per_objfile);
      set_objfile_data (objfile, arm_objfile_data_key, data);
      data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
					   objfile->obfd->section_count,
					   VEC(arm_mapping_symbol_s) *);
    }
  map_p = &data->section_maps[bfd_get_section (sym)->index];

  new_map_sym.value = sym->value;
  new_map_sym.type = name[1];

  /* Assume that most mapping symbols appear in order of increasing
     value.  If they were randomly distributed, it would be faster to
     always push here and then sort at first use.  */
  if (!VEC_empty (arm_mapping_symbol_s, *map_p))
    {
      struct arm_mapping_symbol *prev_map_sym;

      prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
      if (prev_map_sym->value >= sym->value)
	{
	  /* Out-of-order symbol: insert at the position that keeps
	     the vector sorted.  */
	  unsigned int idx;
	  idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
				 arm_compare_mapping_symbols);
	  VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
	  return;
	}
    }

  /* Common case: append at the end.  */
  VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
}
9659
9660 static void
9661 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9662 {
9663 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9664 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9665
9666 /* If necessary, set the T bit. */
9667 if (arm_apcs_32)
9668 {
9669 ULONGEST val, t_bit;
9670 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9671 t_bit = arm_psr_thumb_bit (gdbarch);
9672 if (arm_pc_is_thumb (gdbarch, pc))
9673 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9674 val | t_bit);
9675 else
9676 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9677 val & ~t_bit);
9678 }
9679 }
9680
9681 /* Read the contents of a NEON quad register, by reading from two
9682 double registers. This is used to implement the quad pseudo
9683 registers, and for argument passing in case the quad registers are
9684 missing; vectors are passed in quad registers when using the VFP
9685 ABI, even if a NEON unit is not present. REGNUM is the index of
9686 the quad register, in [0, 15]. */
9687
9688 static enum register_status
9689 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9690 int regnum, gdb_byte *buf)
9691 {
9692 char name_buf[4];
9693 gdb_byte reg_buf[8];
9694 int offset, double_regnum;
9695 enum register_status status;
9696
9697 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9698 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9699 strlen (name_buf));
9700
9701 /* d0 is always the least significant half of q0. */
9702 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9703 offset = 8;
9704 else
9705 offset = 0;
9706
9707 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9708 if (status != REG_VALID)
9709 return status;
9710 memcpy (buf + offset, reg_buf, 8);
9711
9712 offset = 8 - offset;
9713 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9714 if (status != REG_VALID)
9715 return status;
9716 memcpy (buf + offset, reg_buf, 8);
9717
9718 return REG_VALID;
9719 }
9720
9721 static enum register_status
9722 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9723 int regnum, gdb_byte *buf)
9724 {
9725 const int num_regs = gdbarch_num_regs (gdbarch);
9726 char name_buf[4];
9727 gdb_byte reg_buf[8];
9728 int offset, double_regnum;
9729
9730 gdb_assert (regnum >= num_regs);
9731 regnum -= num_regs;
9732
9733 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9734 /* Quad-precision register. */
9735 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9736 else
9737 {
9738 enum register_status status;
9739
9740 /* Single-precision register. */
9741 gdb_assert (regnum < 32);
9742
9743 /* s0 is always the least significant half of d0. */
9744 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9745 offset = (regnum & 1) ? 0 : 4;
9746 else
9747 offset = (regnum & 1) ? 4 : 0;
9748
9749 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9750 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9751 strlen (name_buf));
9752
9753 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9754 if (status == REG_VALID)
9755 memcpy (buf, reg_buf + offset, 4);
9756 return status;
9757 }
9758 }
9759
9760 /* Store the contents of BUF to a NEON quad register, by writing to
9761 two double registers. This is used to implement the quad pseudo
9762 registers, and for argument passing in case the quad registers are
9763 missing; vectors are passed in quad registers when using the VFP
9764 ABI, even if a NEON unit is not present. REGNUM is the index
9765 of the quad register, in [0, 15]. */
9766
9767 static void
9768 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9769 int regnum, const gdb_byte *buf)
9770 {
9771 char name_buf[4];
9772 int offset, double_regnum;
9773
9774 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9775 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9776 strlen (name_buf));
9777
9778 /* d0 is always the least significant half of q0. */
9779 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9780 offset = 8;
9781 else
9782 offset = 0;
9783
9784 regcache_raw_write (regcache, double_regnum, buf + offset);
9785 offset = 8 - offset;
9786 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9787 }
9788
9789 static void
9790 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9791 int regnum, const gdb_byte *buf)
9792 {
9793 const int num_regs = gdbarch_num_regs (gdbarch);
9794 char name_buf[4];
9795 gdb_byte reg_buf[8];
9796 int offset, double_regnum;
9797
9798 gdb_assert (regnum >= num_regs);
9799 regnum -= num_regs;
9800
9801 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9802 /* Quad-precision register. */
9803 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9804 else
9805 {
9806 /* Single-precision register. */
9807 gdb_assert (regnum < 32);
9808
9809 /* s0 is always the least significant half of d0. */
9810 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9811 offset = (regnum & 1) ? 0 : 4;
9812 else
9813 offset = (regnum & 1) ? 4 : 0;
9814
9815 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9816 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9817 strlen (name_buf));
9818
9819 regcache_raw_read (regcache, double_regnum, reg_buf);
9820 memcpy (reg_buf + offset, buf, 4);
9821 regcache_raw_write (regcache, double_regnum, reg_buf);
9822 }
9823 }
9824
/* Return the value, in FRAME, of the register whose number is pointed
   to by BATON.  Used as the read callback for user-register aliases
   registered with user_reg_add.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  const int *reg_p = (const int *) baton;
  return value_of_register (*reg_p, frame);
}
9831 \f
/* OS ABI sniffer for ARM ELF binaries.  When the ELF header's
   EI_OSABI byte is ELFOSABI_ARM (used by GNU tools), scan the note
   sections for an ABI tag; otherwise leave the result unknown so the
   generic ELF sniffer can decide.  */

static enum gdb_osabi
arm_elf_osabi_sniffer (bfd *abfd)
{
  unsigned int elfosabi;
  enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;

  elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];

  if (elfosabi == ELFOSABI_ARM)
    /* GNU tools use this value.  Check note sections in this case,
       as well.  */
    bfd_map_over_sections (abfd,
			   generic_elf_osabi_sniff_abi_tag_sections,
			   &osabi);

  /* Anything else will be handled by the generic ELF sniffer.  */
  return osabi;
}
9850
9851 static int
9852 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9853 struct reggroup *group)
9854 {
9855 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9856 this, FPS register belongs to save_regroup, restore_reggroup, and
9857 all_reggroup, of course. */
9858 if (regnum == ARM_FPS_REGNUM)
9859 return (group == float_reggroup
9860 || group == save_reggroup
9861 || group == restore_reggroup
9862 || group == all_reggroup);
9863 else
9864 return default_register_reggroup_p (gdbarch, regnum, group);
9865 }
9866
9867 \f
/* For backward-compatibility we allow two 'g' packet lengths with
   the remote protocol depending on whether FPA registers are
   supplied.  M-profile targets do not have FPA registers, but some
   stubs already exist in the wild which use a 'g' packet which
   supplies them albeit with dummy values.  The packet format which
   includes FPA registers should be considered deprecated for
   M-profile targets.  */

static void
arm_register_g_packet_guesses (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    {
      /* If we know from the executable this is an M-profile target,
	 cater for remote targets whose register set layout is the
	 same as the FPA layout.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + (8 * FP_REGISTER_SIZE)
				      + (2 * INT_REGISTER_SIZE),
				      tdesc_arm_with_m_fpa_layout);

      /* The regular M-profile layout.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + INT_REGISTER_SIZE,
				      tdesc_arm_with_m);

      /* M-profile plus M4F VFP.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + (16 * VFP_REGISTER_SIZE)
				      + (2 * INT_REGISTER_SIZE),
				      tdesc_arm_with_m_vfp_d16);
    }

  /* Otherwise we don't have a useful guess.  */
}
9909
9910 \f
9911 /* Initialize the current architecture based on INFO. If possible,
9912 re-use an architecture from ARCHES, which is a list of
9913 architectures already created during this debugging session.
9914
9915 Called e.g. at program startup, when reading a core file, and when
9916 reading a binary file. */
9917
9918 static struct gdbarch *
9919 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9920 {
9921 struct gdbarch_tdep *tdep;
9922 struct gdbarch *gdbarch;
9923 struct gdbarch_list *best_arch;
9924 enum arm_abi_kind arm_abi = arm_abi_global;
9925 enum arm_float_model fp_model = arm_fp_model;
9926 struct tdesc_arch_data *tdesc_data = NULL;
9927 int i, is_m = 0;
9928 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9929 int have_wmmx_registers = 0;
9930 int have_neon = 0;
9931 int have_fpa_registers = 1;
9932 const struct target_desc *tdesc = info.target_desc;
9933
9934 /* If we have an object to base this architecture on, try to determine
9935 its ABI. */
9936
9937 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9938 {
9939 int ei_osabi, e_flags;
9940
9941 switch (bfd_get_flavour (info.abfd))
9942 {
9943 case bfd_target_aout_flavour:
9944 /* Assume it's an old APCS-style ABI. */
9945 arm_abi = ARM_ABI_APCS;
9946 break;
9947
9948 case bfd_target_coff_flavour:
9949 /* Assume it's an old APCS-style ABI. */
9950 /* XXX WinCE? */
9951 arm_abi = ARM_ABI_APCS;
9952 break;
9953
9954 case bfd_target_elf_flavour:
9955 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9956 e_flags = elf_elfheader (info.abfd)->e_flags;
9957
9958 if (ei_osabi == ELFOSABI_ARM)
9959 {
9960 /* GNU tools used to use this value, but do not for EABI
9961 objects. There's nowhere to tag an EABI version
9962 anyway, so assume APCS. */
9963 arm_abi = ARM_ABI_APCS;
9964 }
9965 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9966 {
9967 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9968 int attr_arch, attr_profile;
9969
9970 switch (eabi_ver)
9971 {
9972 case EF_ARM_EABI_UNKNOWN:
9973 /* Assume GNU tools. */
9974 arm_abi = ARM_ABI_APCS;
9975 break;
9976
9977 case EF_ARM_EABI_VER4:
9978 case EF_ARM_EABI_VER5:
9979 arm_abi = ARM_ABI_AAPCS;
9980 /* EABI binaries default to VFP float ordering.
9981 They may also contain build attributes that can
9982 be used to identify if the VFP argument-passing
9983 ABI is in use. */
9984 if (fp_model == ARM_FLOAT_AUTO)
9985 {
9986 #ifdef HAVE_ELF
9987 switch (bfd_elf_get_obj_attr_int (info.abfd,
9988 OBJ_ATTR_PROC,
9989 Tag_ABI_VFP_args))
9990 {
9991 case AEABI_VFP_args_base:
9992 /* "The user intended FP parameter/result
9993 passing to conform to AAPCS, base
9994 variant". */
9995 fp_model = ARM_FLOAT_SOFT_VFP;
9996 break;
9997 case AEABI_VFP_args_vfp:
9998 /* "The user intended FP parameter/result
9999 passing to conform to AAPCS, VFP
10000 variant". */
10001 fp_model = ARM_FLOAT_VFP;
10002 break;
10003 case AEABI_VFP_args_toolchain:
10004 /* "The user intended FP parameter/result
10005 passing to conform to tool chain-specific
10006 conventions" - we don't know any such
10007 conventions, so leave it as "auto". */
10008 break;
10009 case AEABI_VFP_args_compatible:
10010 /* "Code is compatible with both the base
10011 and VFP variants; the user did not permit
10012 non-variadic functions to pass FP
10013 parameters/results" - leave it as
10014 "auto". */
10015 break;
10016 default:
10017 /* Attribute value not mentioned in the
10018 November 2012 ABI, so leave it as
10019 "auto". */
10020 break;
10021 }
10022 #else
10023 fp_model = ARM_FLOAT_SOFT_VFP;
10024 #endif
10025 }
10026 break;
10027
10028 default:
10029 /* Leave it as "auto". */
10030 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10031 break;
10032 }
10033
10034 #ifdef HAVE_ELF
10035 /* Detect M-profile programs. This only works if the
10036 executable file includes build attributes; GCC does
10037 copy them to the executable, but e.g. RealView does
10038 not. */
10039 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10040 Tag_CPU_arch);
10041 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10042 OBJ_ATTR_PROC,
10043 Tag_CPU_arch_profile);
10044 /* GCC specifies the profile for v6-M; RealView only
10045 specifies the profile for architectures starting with
10046 V7 (as opposed to architectures with a tag
10047 numerically greater than TAG_CPU_ARCH_V7). */
10048 if (!tdesc_has_registers (tdesc)
10049 && (attr_arch == TAG_CPU_ARCH_V6_M
10050 || attr_arch == TAG_CPU_ARCH_V6S_M
10051 || attr_profile == 'M'))
10052 is_m = 1;
10053 #endif
10054 }
10055
10056 if (fp_model == ARM_FLOAT_AUTO)
10057 {
10058 int e_flags = elf_elfheader (info.abfd)->e_flags;
10059
10060 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10061 {
10062 case 0:
10063 /* Leave it as "auto". Strictly speaking this case
10064 means FPA, but almost nobody uses that now, and
10065 many toolchains fail to set the appropriate bits
10066 for the floating-point model they use. */
10067 break;
10068 case EF_ARM_SOFT_FLOAT:
10069 fp_model = ARM_FLOAT_SOFT_FPA;
10070 break;
10071 case EF_ARM_VFP_FLOAT:
10072 fp_model = ARM_FLOAT_VFP;
10073 break;
10074 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10075 fp_model = ARM_FLOAT_SOFT_VFP;
10076 break;
10077 }
10078 }
10079
10080 if (e_flags & EF_ARM_BE8)
10081 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10082
10083 break;
10084
10085 default:
10086 /* Leave it as "auto". */
10087 break;
10088 }
10089 }
10090
10091 /* Check any target description for validity. */
10092 if (tdesc_has_registers (tdesc))
10093 {
10094 /* For most registers we require GDB's default names; but also allow
10095 the numeric names for sp / lr / pc, as a convenience. */
10096 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10097 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10098 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10099
10100 const struct tdesc_feature *feature;
10101 int valid_p;
10102
10103 feature = tdesc_find_feature (tdesc,
10104 "org.gnu.gdb.arm.core");
10105 if (feature == NULL)
10106 {
10107 feature = tdesc_find_feature (tdesc,
10108 "org.gnu.gdb.arm.m-profile");
10109 if (feature == NULL)
10110 return NULL;
10111 else
10112 is_m = 1;
10113 }
10114
10115 tdesc_data = tdesc_data_alloc ();
10116
10117 valid_p = 1;
10118 for (i = 0; i < ARM_SP_REGNUM; i++)
10119 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10120 arm_register_names[i]);
10121 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10122 ARM_SP_REGNUM,
10123 arm_sp_names);
10124 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10125 ARM_LR_REGNUM,
10126 arm_lr_names);
10127 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10128 ARM_PC_REGNUM,
10129 arm_pc_names);
10130 if (is_m)
10131 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10132 ARM_PS_REGNUM, "xpsr");
10133 else
10134 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10135 ARM_PS_REGNUM, "cpsr");
10136
10137 if (!valid_p)
10138 {
10139 tdesc_data_cleanup (tdesc_data);
10140 return NULL;
10141 }
10142
10143 feature = tdesc_find_feature (tdesc,
10144 "org.gnu.gdb.arm.fpa");
10145 if (feature != NULL)
10146 {
10147 valid_p = 1;
10148 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10149 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10150 arm_register_names[i]);
10151 if (!valid_p)
10152 {
10153 tdesc_data_cleanup (tdesc_data);
10154 return NULL;
10155 }
10156 }
10157 else
10158 have_fpa_registers = 0;
10159
10160 feature = tdesc_find_feature (tdesc,
10161 "org.gnu.gdb.xscale.iwmmxt");
10162 if (feature != NULL)
10163 {
10164 static const char *const iwmmxt_names[] = {
10165 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10166 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10167 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10168 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10169 };
10170
10171 valid_p = 1;
10172 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10173 valid_p
10174 &= tdesc_numbered_register (feature, tdesc_data, i,
10175 iwmmxt_names[i - ARM_WR0_REGNUM]);
10176
10177 /* Check for the control registers, but do not fail if they
10178 are missing. */
10179 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10180 tdesc_numbered_register (feature, tdesc_data, i,
10181 iwmmxt_names[i - ARM_WR0_REGNUM]);
10182
10183 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10184 valid_p
10185 &= tdesc_numbered_register (feature, tdesc_data, i,
10186 iwmmxt_names[i - ARM_WR0_REGNUM]);
10187
10188 if (!valid_p)
10189 {
10190 tdesc_data_cleanup (tdesc_data);
10191 return NULL;
10192 }
10193
10194 have_wmmx_registers = 1;
10195 }
10196
10197 /* If we have a VFP unit, check whether the single precision registers
10198 are present. If not, then we will synthesize them as pseudo
10199 registers. */
10200 feature = tdesc_find_feature (tdesc,
10201 "org.gnu.gdb.arm.vfp");
10202 if (feature != NULL)
10203 {
10204 static const char *const vfp_double_names[] = {
10205 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10206 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10207 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10208 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10209 };
10210
10211 /* Require the double precision registers. There must be either
10212 16 or 32. */
10213 valid_p = 1;
10214 for (i = 0; i < 32; i++)
10215 {
10216 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10217 ARM_D0_REGNUM + i,
10218 vfp_double_names[i]);
10219 if (!valid_p)
10220 break;
10221 }
10222 if (!valid_p && i == 16)
10223 valid_p = 1;
10224
10225 /* Also require FPSCR. */
10226 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10227 ARM_FPSCR_REGNUM, "fpscr");
10228 if (!valid_p)
10229 {
10230 tdesc_data_cleanup (tdesc_data);
10231 return NULL;
10232 }
10233
10234 if (tdesc_unnumbered_register (feature, "s0") == 0)
10235 have_vfp_pseudos = 1;
10236
10237 vfp_register_count = i;
10238
10239 /* If we have VFP, also check for NEON. The architecture allows
10240 NEON without VFP (integer vector operations only), but GDB
10241 does not support that. */
10242 feature = tdesc_find_feature (tdesc,
10243 "org.gnu.gdb.arm.neon");
10244 if (feature != NULL)
10245 {
10246 /* NEON requires 32 double-precision registers. */
10247 if (i != 32)
10248 {
10249 tdesc_data_cleanup (tdesc_data);
10250 return NULL;
10251 }
10252
10253 /* If there are quad registers defined by the stub, use
10254 their type; otherwise (normally) provide them with
10255 the default type. */
10256 if (tdesc_unnumbered_register (feature, "q0") == 0)
10257 have_neon_pseudos = 1;
10258
10259 have_neon = 1;
10260 }
10261 }
10262 }
10263
10264 /* If there is already a candidate, use it. */
10265 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10266 best_arch != NULL;
10267 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10268 {
10269 if (arm_abi != ARM_ABI_AUTO
10270 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10271 continue;
10272
10273 if (fp_model != ARM_FLOAT_AUTO
10274 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10275 continue;
10276
10277 /* There are various other properties in tdep that we do not
10278 need to check here: those derived from a target description,
10279 since gdbarches with a different target description are
10280 automatically disqualified. */
10281
10282 /* Do check is_m, though, since it might come from the binary. */
10283 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10284 continue;
10285
10286 /* Found a match. */
10287 break;
10288 }
10289
10290 if (best_arch != NULL)
10291 {
10292 if (tdesc_data != NULL)
10293 tdesc_data_cleanup (tdesc_data);
10294 return best_arch->gdbarch;
10295 }
10296
10297 tdep = XCNEW (struct gdbarch_tdep);
10298 gdbarch = gdbarch_alloc (&info, tdep);
10299
10300 /* Record additional information about the architecture we are defining.
10301 These are gdbarch discriminators, like the OSABI. */
10302 tdep->arm_abi = arm_abi;
10303 tdep->fp_model = fp_model;
10304 tdep->is_m = is_m;
10305 tdep->have_fpa_registers = have_fpa_registers;
10306 tdep->have_wmmx_registers = have_wmmx_registers;
10307 gdb_assert (vfp_register_count == 0
10308 || vfp_register_count == 16
10309 || vfp_register_count == 32);
10310 tdep->vfp_register_count = vfp_register_count;
10311 tdep->have_vfp_pseudos = have_vfp_pseudos;
10312 tdep->have_neon_pseudos = have_neon_pseudos;
10313 tdep->have_neon = have_neon;
10314
10315 arm_register_g_packet_guesses (gdbarch);
10316
10317 /* Breakpoints. */
10318 switch (info.byte_order_for_code)
10319 {
10320 case BFD_ENDIAN_BIG:
10321 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10322 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10323 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10324 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10325
10326 break;
10327
10328 case BFD_ENDIAN_LITTLE:
10329 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10330 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10331 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10332 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10333
10334 break;
10335
10336 default:
10337 internal_error (__FILE__, __LINE__,
10338 _("arm_gdbarch_init: bad byte order for float format"));
10339 }
10340
10341 /* On ARM targets char defaults to unsigned. */
10342 set_gdbarch_char_signed (gdbarch, 0);
10343
10344 /* Note: for displaced stepping, this includes the breakpoint, and one word
10345 of additional scratch space. This setting isn't used for anything beside
10346 displaced stepping at present. */
10347 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10348
10349 /* This should be low enough for everything. */
10350 tdep->lowest_pc = 0x20;
10351 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10352
10353 /* The default, for both APCS and AAPCS, is to return small
10354 structures in registers. */
10355 tdep->struct_return = reg_struct_return;
10356
10357 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10358 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10359
10360 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10361
10362 /* Frame handling. */
10363 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10364 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10365 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10366
10367 frame_base_set_default (gdbarch, &arm_normal_base);
10368
10369 /* Address manipulation. */
10370 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10371
10372 /* Advance PC across function entry code. */
10373 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10374
10375 /* Detect whether PC is at a point where the stack has been destroyed. */
10376 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
10377
10378 /* Skip trampolines. */
10379 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10380
10381 /* The stack grows downward. */
10382 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10383
10384 /* Breakpoint manipulation. */
10385 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10386 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10387 arm_remote_breakpoint_from_pc);
10388
10389 /* Information about registers, etc. */
10390 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10391 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10392 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10393 set_gdbarch_register_type (gdbarch, arm_register_type);
10394 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10395
10396 /* This "info float" is FPA-specific. Use the generic version if we
10397 do not have FPA. */
10398 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10399 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10400
10401 /* Internal <-> external register number maps. */
10402 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10403 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10404
10405 set_gdbarch_register_name (gdbarch, arm_register_name);
10406
10407 /* Returning results. */
10408 set_gdbarch_return_value (gdbarch, arm_return_value);
10409
10410 /* Disassembly. */
10411 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10412
10413 /* Minsymbol frobbing. */
10414 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10415 set_gdbarch_coff_make_msymbol_special (gdbarch,
10416 arm_coff_make_msymbol_special);
10417 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10418
10419 /* Thumb-2 IT block support. */
10420 set_gdbarch_adjust_breakpoint_address (gdbarch,
10421 arm_adjust_breakpoint_address);
10422
10423 /* Virtual tables. */
10424 set_gdbarch_vbit_in_delta (gdbarch, 1);
10425
10426 /* Hook in the ABI-specific overrides, if they have been registered. */
10427 gdbarch_init_osabi (info, gdbarch);
10428
10429 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10430
10431 /* Add some default predicates. */
10432 if (is_m)
10433 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10434 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10435 dwarf2_append_unwinders (gdbarch);
10436 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10437 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10438
10439 /* Now we have tuned the configuration, set a few final things,
10440 based on what the OS ABI has told us. */
10441
10442 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10443 binaries are always marked. */
10444 if (tdep->arm_abi == ARM_ABI_AUTO)
10445 tdep->arm_abi = ARM_ABI_APCS;
10446
10447 /* Watchpoints are not steppable. */
10448 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10449
10450 /* We used to default to FPA for generic ARM, but almost nobody
10451 uses that now, and we now provide a way for the user to force
10452 the model. So default to the most useful variant. */
10453 if (tdep->fp_model == ARM_FLOAT_AUTO)
10454 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10455
10456 if (tdep->jb_pc >= 0)
10457 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10458
10459 /* Floating point sizes and format. */
10460 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10461 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10462 {
10463 set_gdbarch_double_format
10464 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10465 set_gdbarch_long_double_format
10466 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10467 }
10468 else
10469 {
10470 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10471 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10472 }
10473
10474 if (have_vfp_pseudos)
10475 {
10476 /* NOTE: These are the only pseudo registers used by
10477 the ARM target at the moment. If more are added, a
10478 little more care in numbering will be needed. */
10479
10480 int num_pseudos = 32;
10481 if (have_neon_pseudos)
10482 num_pseudos += 16;
10483 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10484 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10485 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10486 }
10487
10488 if (tdesc_data)
10489 {
10490 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10491
10492 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10493
10494 /* Override tdesc_register_type to adjust the types of VFP
10495 registers for NEON. */
10496 set_gdbarch_register_type (gdbarch, arm_register_type);
10497 }
10498
10499 /* Add standard register aliases. We add aliases even for those
     names which are used by the current architecture - it's simpler,
10501 and does no harm, since nothing ever lists user registers. */
10502 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10503 user_reg_add (gdbarch, arm_register_aliases[i].name,
10504 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10505
10506 return gdbarch;
10507 }
10508
10509 static void
10510 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10511 {
10512 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10513
10514 if (tdep == NULL)
10515 return;
10516
10517 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10518 (unsigned long) tdep->lowest_pc);
10519 }
10520
10521 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10522
10523 void
10524 _initialize_arm_tdep (void)
10525 {
10526 struct ui_file *stb;
10527 long length;
10528 struct cmd_list_element *new_set, *new_show;
10529 const char *setname;
10530 const char *setdesc;
10531 const char *const *regnames;
10532 int numregs, i, j;
10533 static char *helptext;
10534 char regdesc[1024], *rdptr = regdesc;
10535 size_t rest = sizeof (regdesc);
10536
10537 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10538
10539 arm_objfile_data_key
10540 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10541
10542 /* Add ourselves to objfile event chain. */
10543 observer_attach_new_objfile (arm_exidx_new_objfile);
10544 arm_exidx_data_key
10545 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10546
10547 /* Register an ELF OS ABI sniffer for ARM binaries. */
10548 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10549 bfd_target_elf_flavour,
10550 arm_elf_osabi_sniffer);
10551
10552 /* Initialize the standard target descriptions. */
10553 initialize_tdesc_arm_with_m ();
10554 initialize_tdesc_arm_with_m_fpa_layout ();
10555 initialize_tdesc_arm_with_m_vfp_d16 ();
10556 initialize_tdesc_arm_with_iwmmxt ();
10557 initialize_tdesc_arm_with_vfpv2 ();
10558 initialize_tdesc_arm_with_vfpv3 ();
10559 initialize_tdesc_arm_with_neon ();
10560
10561 /* Get the number of possible sets of register names defined in opcodes. */
10562 num_disassembly_options = get_arm_regname_num_options ();
10563
10564 /* Add root prefix command for all "set arm"/"show arm" commands. */
10565 add_prefix_cmd ("arm", no_class, set_arm_command,
10566 _("Various ARM-specific commands."),
10567 &setarmcmdlist, "set arm ", 0, &setlist);
10568
10569 add_prefix_cmd ("arm", no_class, show_arm_command,
10570 _("Various ARM-specific commands."),
10571 &showarmcmdlist, "show arm ", 0, &showlist);
10572
10573 /* Sync the opcode insn printer with our register viewer. */
10574 parse_arm_disassembler_option ("reg-names-std");
10575
10576 /* Initialize the array that will be passed to
10577 add_setshow_enum_cmd(). */
10578 valid_disassembly_styles = XNEWVEC (const char *,
10579 num_disassembly_options + 1);
10580 for (i = 0; i < num_disassembly_options; i++)
10581 {
10582 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
10583 valid_disassembly_styles[i] = setname;
10584 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10585 rdptr += length;
10586 rest -= length;
10587 /* When we find the default names, tell the disassembler to use
10588 them. */
10589 if (!strcmp (setname, "std"))
10590 {
10591 disassembly_style = setname;
10592 set_arm_regname_option (i);
10593 }
10594 }
10595 /* Mark the end of valid options. */
10596 valid_disassembly_styles[num_disassembly_options] = NULL;
10597
10598 /* Create the help text. */
10599 stb = mem_fileopen ();
10600 fprintf_unfiltered (stb, "%s%s%s",
10601 _("The valid values are:\n"),
10602 regdesc,
10603 _("The default is \"std\"."));
10604 helptext = ui_file_xstrdup (stb, NULL);
10605 ui_file_delete (stb);
10606
10607 add_setshow_enum_cmd("disassembler", no_class,
10608 valid_disassembly_styles, &disassembly_style,
10609 _("Set the disassembly style."),
10610 _("Show the disassembly style."),
10611 helptext,
10612 set_disassembly_style_sfunc,
10613 NULL, /* FIXME: i18n: The disassembly style is
10614 \"%s\". */
10615 &setarmcmdlist, &showarmcmdlist);
10616
10617 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10618 _("Set usage of ARM 32-bit mode."),
10619 _("Show usage of ARM 32-bit mode."),
10620 _("When off, a 26-bit PC will be used."),
10621 NULL,
10622 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10623 mode is %s. */
10624 &setarmcmdlist, &showarmcmdlist);
10625
10626 /* Add a command to allow the user to force the FPU model. */
10627 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10628 _("Set the floating point type."),
10629 _("Show the floating point type."),
10630 _("auto - Determine the FP typefrom the OS-ABI.\n\
10631 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10632 fpa - FPA co-processor (GCC compiled).\n\
10633 softvfp - Software FP with pure-endian doubles.\n\
10634 vfp - VFP co-processor."),
10635 set_fp_model_sfunc, show_fp_model,
10636 &setarmcmdlist, &showarmcmdlist);
10637
10638 /* Add a command to allow the user to force the ABI. */
10639 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10640 _("Set the ABI."),
10641 _("Show the ABI."),
10642 NULL, arm_set_abi, arm_show_abi,
10643 &setarmcmdlist, &showarmcmdlist);
10644
10645 /* Add two commands to allow the user to force the assumed
10646 execution mode. */
10647 add_setshow_enum_cmd ("fallback-mode", class_support,
10648 arm_mode_strings, &arm_fallback_mode_string,
10649 _("Set the mode assumed when symbols are unavailable."),
10650 _("Show the mode assumed when symbols are unavailable."),
10651 NULL, NULL, arm_show_fallback_mode,
10652 &setarmcmdlist, &showarmcmdlist);
10653 add_setshow_enum_cmd ("force-mode", class_support,
10654 arm_mode_strings, &arm_force_mode_string,
10655 _("Set the mode assumed even when symbols are available."),
10656 _("Show the mode assumed even when symbols are available."),
10657 NULL, NULL, arm_show_force_mode,
10658 &setarmcmdlist, &showarmcmdlist);
10659
10660 /* Debugging flag. */
10661 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10662 _("Set ARM debugging."),
10663 _("Show ARM debugging."),
10664 _("When on, arm-specific debugging is enabled."),
10665 NULL,
10666 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10667 &setdebuglist, &showdebuglist);
10668 }
10669
/* ARM-reversible process record data structures.  */

/* Encoded sizes, in bytes, of the three instruction formats the
   recorder handles.  */
#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

/* Copy LENGTH register numbers from RECORD_BUF into a freshly
   XNEWVEC'd array assigned to REGS.  No-op when LENGTH is zero.
   NOTE(review): LENGTH is evaluated twice; do not pass an expression
   with side effects.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
              } \
          } \
        while (0)

/* Copy LENGTH <len, addr> pairs from RECORD_BUF into a freshly
   XNEWVEC'd array of struct arm_mem_r assigned to MEMS.  No-op when
   LENGTH is zero.  &MEMS->len is the address of the first member, so
   it equals MEMS itself here; the copy fills the whole array.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
            { \
              MEMS =  XNEWVEC (struct arm_mem_r, mem_len);  \
              memcpy(&MEMS->len, &RECORD_BUF[0], \
                     sizeof(struct arm_mem_r) * LENGTH); \
            } \
          } \
          while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)

/* ARM memory record structure.  */
struct arm_mem_r
{
  uint32_t len;    /* Record length.  */
  uint32_t addr;   /* Memory address.  */
};

/* ARM instruction record contains opcode of current insn
   and execution state (before entry to decode_insn()),
   contains list of to-be-modified registers and
   memory blocks (on return from decode_insn()).  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;
  struct regcache *regcache;
  CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
  uint32_t arm_insn;            /* Should accommodate thumb.  */
  uint32_t cond;                /* Condition code.  */
  uint32_t opcode;              /* Insn opcode.  */
  uint32_t decode;              /* Insn decode bits.  */
  uint32_t mem_rec_count;       /* No of mem records.  */
  uint32_t reg_rec_count;       /* No of reg records.  */
  uint32_t *arm_regs;           /* Registers to be saved for this record.  */
  struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
} insn_decode_record;
10736
10737
/* Checks ARM SBZ and SBO mandatory fields.

   Validate the LEN-bit field of INSN starting at 1-based bit position
   BIT_NUM.  A non-zero SBO selects the "should be one" check (every
   bit of the field must be set); zero selects the "should be zero"
   check (every bit must be clear).  Returns 1 when the field is valid,
   0 otherwise.  An empty field (LEN == 0) is trivially valid.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t field, mask;

  if (!len)
    return 1;

  field = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));

  /* Mask covering exactly LEN low-order bits; special-case LEN == 32
     to avoid the undefined full-width shift.  */
  mask = (len >= 32) ? 0xffffffffu : ((1u << len) - 1);

  /* Compare the whole field at once.  The previous bit-by-bit loop
     tested "ones & sbo", which is always zero for an SBZ check (so a
     correct all-zero field was reported invalid), and for an SBO check
     it stopped at the highest set bit (so clear upper bits were
     wrongly accepted).  */
  return sbo ? (field == mask) : (field == 0);
}
10761
/* Result codes returned by the arm_record_* instruction handlers.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Which flavour of misc store arm_record_strx is decoding.  */
typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;

/* Instruction set the insn being recorded belongs to.  */
typedef enum
{
  ARM_RECORD=1,
  THUMB_RECORD,
  THUMB2_RECORD
} record_type_t;
10780
10781
10782 static int
10783 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10784 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10785 {
10786
10787 struct regcache *reg_cache = arm_insn_r->regcache;
10788 ULONGEST u_regval[2]= {0};
10789
10790 uint32_t reg_src1 = 0, reg_src2 = 0;
10791 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10792 uint32_t opcode1 = 0;
10793
10794 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10795 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10796 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10797
10798
10799 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10800 {
10801 /* 1) Handle misc store, immediate offset. */
10802 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10803 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10804 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10805 regcache_raw_read_unsigned (reg_cache, reg_src1,
10806 &u_regval[0]);
10807 if (ARM_PC_REGNUM == reg_src1)
10808 {
10809 /* If R15 was used as Rn, hence current PC+8. */
10810 u_regval[0] = u_regval[0] + 8;
10811 }
10812 offset_8 = (immed_high << 4) | immed_low;
10813 /* Calculate target store address. */
10814 if (14 == arm_insn_r->opcode)
10815 {
10816 tgt_mem_addr = u_regval[0] + offset_8;
10817 }
10818 else
10819 {
10820 tgt_mem_addr = u_regval[0] - offset_8;
10821 }
10822 if (ARM_RECORD_STRH == str_type)
10823 {
10824 record_buf_mem[0] = 2;
10825 record_buf_mem[1] = tgt_mem_addr;
10826 arm_insn_r->mem_rec_count = 1;
10827 }
10828 else if (ARM_RECORD_STRD == str_type)
10829 {
10830 record_buf_mem[0] = 4;
10831 record_buf_mem[1] = tgt_mem_addr;
10832 record_buf_mem[2] = 4;
10833 record_buf_mem[3] = tgt_mem_addr + 4;
10834 arm_insn_r->mem_rec_count = 2;
10835 }
10836 }
10837 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10838 {
10839 /* 2) Store, register offset. */
10840 /* Get Rm. */
10841 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10842 /* Get Rn. */
10843 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10844 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10845 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10846 if (15 == reg_src2)
10847 {
10848 /* If R15 was used as Rn, hence current PC+8. */
10849 u_regval[0] = u_regval[0] + 8;
10850 }
10851 /* Calculate target store address, Rn +/- Rm, register offset. */
10852 if (12 == arm_insn_r->opcode)
10853 {
10854 tgt_mem_addr = u_regval[0] + u_regval[1];
10855 }
10856 else
10857 {
10858 tgt_mem_addr = u_regval[1] - u_regval[0];
10859 }
10860 if (ARM_RECORD_STRH == str_type)
10861 {
10862 record_buf_mem[0] = 2;
10863 record_buf_mem[1] = tgt_mem_addr;
10864 arm_insn_r->mem_rec_count = 1;
10865 }
10866 else if (ARM_RECORD_STRD == str_type)
10867 {
10868 record_buf_mem[0] = 4;
10869 record_buf_mem[1] = tgt_mem_addr;
10870 record_buf_mem[2] = 4;
10871 record_buf_mem[3] = tgt_mem_addr + 4;
10872 arm_insn_r->mem_rec_count = 2;
10873 }
10874 }
10875 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10876 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10877 {
10878 /* 3) Store, immediate pre-indexed. */
10879 /* 5) Store, immediate post-indexed. */
10880 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10881 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10882 offset_8 = (immed_high << 4) | immed_low;
10883 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10884 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10885 /* Calculate target store address, Rn +/- Rm, register offset. */
10886 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10887 {
10888 tgt_mem_addr = u_regval[0] + offset_8;
10889 }
10890 else
10891 {
10892 tgt_mem_addr = u_regval[0] - offset_8;
10893 }
10894 if (ARM_RECORD_STRH == str_type)
10895 {
10896 record_buf_mem[0] = 2;
10897 record_buf_mem[1] = tgt_mem_addr;
10898 arm_insn_r->mem_rec_count = 1;
10899 }
10900 else if (ARM_RECORD_STRD == str_type)
10901 {
10902 record_buf_mem[0] = 4;
10903 record_buf_mem[1] = tgt_mem_addr;
10904 record_buf_mem[2] = 4;
10905 record_buf_mem[3] = tgt_mem_addr + 4;
10906 arm_insn_r->mem_rec_count = 2;
10907 }
10908 /* Record Rn also as it changes. */
10909 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10910 arm_insn_r->reg_rec_count = 1;
10911 }
10912 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10913 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10914 {
10915 /* 4) Store, register pre-indexed. */
10916 /* 6) Store, register post -indexed. */
10917 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10918 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10919 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10920 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10921 /* Calculate target store address, Rn +/- Rm, register offset. */
10922 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10923 {
10924 tgt_mem_addr = u_regval[0] + u_regval[1];
10925 }
10926 else
10927 {
10928 tgt_mem_addr = u_regval[1] - u_regval[0];
10929 }
10930 if (ARM_RECORD_STRH == str_type)
10931 {
10932 record_buf_mem[0] = 2;
10933 record_buf_mem[1] = tgt_mem_addr;
10934 arm_insn_r->mem_rec_count = 1;
10935 }
10936 else if (ARM_RECORD_STRD == str_type)
10937 {
10938 record_buf_mem[0] = 4;
10939 record_buf_mem[1] = tgt_mem_addr;
10940 record_buf_mem[2] = 4;
10941 record_buf_mem[3] = tgt_mem_addr + 4;
10942 arm_insn_r->mem_rec_count = 2;
10943 }
10944 /* Record Rn also as it changes. */
10945 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10946 arm_insn_r->reg_rec_count = 1;
10947 }
10948 return 0;
10949 }
10950
10951 /* Handling ARM extension space insns. */
10952
10953 static int
10954 arm_record_extension_space (insn_decode_record *arm_insn_r)
10955 {
10956 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10957 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10958 uint32_t record_buf[8], record_buf_mem[8];
10959 uint32_t reg_src1 = 0;
10960 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10961 struct regcache *reg_cache = arm_insn_r->regcache;
10962 ULONGEST u_regval = 0;
10963
10964 gdb_assert (!INSN_RECORDED(arm_insn_r));
10965 /* Handle unconditional insn extension space. */
10966
10967 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10968 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10969 if (arm_insn_r->cond)
10970 {
10971 /* PLD has no affect on architectural state, it just affects
10972 the caches. */
10973 if (5 == ((opcode1 & 0xE0) >> 5))
10974 {
10975 /* BLX(1) */
10976 record_buf[0] = ARM_PS_REGNUM;
10977 record_buf[1] = ARM_LR_REGNUM;
10978 arm_insn_r->reg_rec_count = 2;
10979 }
10980 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10981 }
10982
10983
10984 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10985 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10986 {
10987 ret = -1;
10988 /* Undefined instruction on ARM V5; need to handle if later
10989 versions define it. */
10990 }
10991
10992 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10993 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10994 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10995
10996 /* Handle arithmetic insn extension space. */
10997 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10998 && !INSN_RECORDED(arm_insn_r))
10999 {
11000 /* Handle MLA(S) and MUL(S). */
11001 if (0 <= insn_op1 && 3 >= insn_op1)
11002 {
11003 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11004 record_buf[1] = ARM_PS_REGNUM;
11005 arm_insn_r->reg_rec_count = 2;
11006 }
11007 else if (4 <= insn_op1 && 15 >= insn_op1)
11008 {
11009 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11010 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11011 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11012 record_buf[2] = ARM_PS_REGNUM;
11013 arm_insn_r->reg_rec_count = 3;
11014 }
11015 }
11016
11017 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11018 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11019 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11020
11021 /* Handle control insn extension space. */
11022
11023 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11024 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11025 {
11026 if (!bit (arm_insn_r->arm_insn,25))
11027 {
11028 if (!bits (arm_insn_r->arm_insn, 4, 7))
11029 {
11030 if ((0 == insn_op1) || (2 == insn_op1))
11031 {
11032 /* MRS. */
11033 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11034 arm_insn_r->reg_rec_count = 1;
11035 }
11036 else if (1 == insn_op1)
11037 {
11038 /* CSPR is going to be changed. */
11039 record_buf[0] = ARM_PS_REGNUM;
11040 arm_insn_r->reg_rec_count = 1;
11041 }
11042 else if (3 == insn_op1)
11043 {
11044 /* SPSR is going to be changed. */
11045 /* We need to get SPSR value, which is yet to be done. */
11046 printf_unfiltered (_("Process record does not support "
11047 "instruction 0x%0x at address %s.\n"),
11048 arm_insn_r->arm_insn,
11049 paddress (arm_insn_r->gdbarch,
11050 arm_insn_r->this_addr));
11051 return -1;
11052 }
11053 }
11054 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11055 {
11056 if (1 == insn_op1)
11057 {
11058 /* BX. */
11059 record_buf[0] = ARM_PS_REGNUM;
11060 arm_insn_r->reg_rec_count = 1;
11061 }
11062 else if (3 == insn_op1)
11063 {
11064 /* CLZ. */
11065 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11066 arm_insn_r->reg_rec_count = 1;
11067 }
11068 }
11069 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11070 {
11071 /* BLX. */
11072 record_buf[0] = ARM_PS_REGNUM;
11073 record_buf[1] = ARM_LR_REGNUM;
11074 arm_insn_r->reg_rec_count = 2;
11075 }
11076 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11077 {
11078 /* QADD, QSUB, QDADD, QDSUB */
11079 record_buf[0] = ARM_PS_REGNUM;
11080 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11081 arm_insn_r->reg_rec_count = 2;
11082 }
11083 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11084 {
11085 /* BKPT. */
11086 record_buf[0] = ARM_PS_REGNUM;
11087 record_buf[1] = ARM_LR_REGNUM;
11088 arm_insn_r->reg_rec_count = 2;
11089
11090 /* Save SPSR also;how? */
11091 printf_unfiltered (_("Process record does not support "
11092 "instruction 0x%0x at address %s.\n"),
11093 arm_insn_r->arm_insn,
11094 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11095 return -1;
11096 }
11097 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11098 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11099 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11100 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11101 )
11102 {
11103 if (0 == insn_op1 || 1 == insn_op1)
11104 {
11105 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11106 /* We dont do optimization for SMULW<y> where we
11107 need only Rd. */
11108 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11109 record_buf[1] = ARM_PS_REGNUM;
11110 arm_insn_r->reg_rec_count = 2;
11111 }
11112 else if (2 == insn_op1)
11113 {
11114 /* SMLAL<x><y>. */
11115 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11116 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11117 arm_insn_r->reg_rec_count = 2;
11118 }
11119 else if (3 == insn_op1)
11120 {
11121 /* SMUL<x><y>. */
11122 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11123 arm_insn_r->reg_rec_count = 1;
11124 }
11125 }
11126 }
11127 else
11128 {
11129 /* MSR : immediate form. */
11130 if (1 == insn_op1)
11131 {
11132 /* CSPR is going to be changed. */
11133 record_buf[0] = ARM_PS_REGNUM;
11134 arm_insn_r->reg_rec_count = 1;
11135 }
11136 else if (3 == insn_op1)
11137 {
11138 /* SPSR is going to be changed. */
11139 /* we need to get SPSR value, which is yet to be done */
11140 printf_unfiltered (_("Process record does not support "
11141 "instruction 0x%0x at address %s.\n"),
11142 arm_insn_r->arm_insn,
11143 paddress (arm_insn_r->gdbarch,
11144 arm_insn_r->this_addr));
11145 return -1;
11146 }
11147 }
11148 }
11149
11150 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11151 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11152 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11153
11154 /* Handle load/store insn extension space. */
11155
11156 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11157 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11158 && !INSN_RECORDED(arm_insn_r))
11159 {
11160 /* SWP/SWPB. */
11161 if (0 == insn_op1)
11162 {
11163 /* These insn, changes register and memory as well. */
11164 /* SWP or SWPB insn. */
11165 /* Get memory address given by Rn. */
11166 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11167 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11168 /* SWP insn ?, swaps word. */
11169 if (8 == arm_insn_r->opcode)
11170 {
11171 record_buf_mem[0] = 4;
11172 }
11173 else
11174 {
11175 /* SWPB insn, swaps only byte. */
11176 record_buf_mem[0] = 1;
11177 }
11178 record_buf_mem[1] = u_regval;
11179 arm_insn_r->mem_rec_count = 1;
11180 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11181 arm_insn_r->reg_rec_count = 1;
11182 }
11183 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11184 {
11185 /* STRH. */
11186 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11187 ARM_RECORD_STRH);
11188 }
11189 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11190 {
11191 /* LDRD. */
11192 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11193 record_buf[1] = record_buf[0] + 1;
11194 arm_insn_r->reg_rec_count = 2;
11195 }
11196 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11197 {
11198 /* STRD. */
11199 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11200 ARM_RECORD_STRD);
11201 }
11202 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11203 {
11204 /* LDRH, LDRSB, LDRSH. */
11205 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11206 arm_insn_r->reg_rec_count = 1;
11207 }
11208
11209 }
11210
11211 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11212 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11213 && !INSN_RECORDED(arm_insn_r))
11214 {
11215 ret = -1;
11216 /* Handle coprocessor insn extension space. */
11217 }
11218
11219 /* To be done for ARMv5 and later; as of now we return -1. */
11220 if (-1 == ret)
11221 printf_unfiltered (_("Process record does not support instruction x%0x "
11222 "at address %s.\n"),arm_insn_r->arm_insn,
11223 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11224
11225
11226 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11227 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11228
11229 return ret;
11230 }
11231
11232 /* Handling opcode 000 insns. */
11233
11234 static int
11235 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11236 {
11237 struct regcache *reg_cache = arm_insn_r->regcache;
11238 uint32_t record_buf[8], record_buf_mem[8];
11239 ULONGEST u_regval[2] = {0};
11240
11241 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11242 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11243 uint32_t opcode1 = 0;
11244
11245 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11246 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11247 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11248
11249 /* Data processing insn /multiply insn. */
11250 if (9 == arm_insn_r->decode
11251 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11252 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11253 {
11254 /* Handle multiply instructions. */
11255 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11256 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11257 {
11258 /* Handle MLA and MUL. */
11259 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11260 record_buf[1] = ARM_PS_REGNUM;
11261 arm_insn_r->reg_rec_count = 2;
11262 }
11263 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11264 {
11265 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11266 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11267 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11268 record_buf[2] = ARM_PS_REGNUM;
11269 arm_insn_r->reg_rec_count = 3;
11270 }
11271 }
11272 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11273 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11274 {
11275 /* Handle misc load insns, as 20th bit (L = 1). */
11276 /* LDR insn has a capability to do branching, if
11277 MOV LR, PC is precceded by LDR insn having Rn as R15
11278 in that case, it emulates branch and link insn, and hence we
11279 need to save CSPR and PC as well. I am not sure this is right
11280 place; as opcode = 010 LDR insn make this happen, if R15 was
11281 used. */
11282 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11283 if (15 != reg_dest)
11284 {
11285 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11286 arm_insn_r->reg_rec_count = 1;
11287 }
11288 else
11289 {
11290 record_buf[0] = reg_dest;
11291 record_buf[1] = ARM_PS_REGNUM;
11292 arm_insn_r->reg_rec_count = 2;
11293 }
11294 }
11295 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11296 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11297 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11298 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11299 {
11300 /* Handle MSR insn. */
11301 if (9 == arm_insn_r->opcode)
11302 {
11303 /* CSPR is going to be changed. */
11304 record_buf[0] = ARM_PS_REGNUM;
11305 arm_insn_r->reg_rec_count = 1;
11306 }
11307 else
11308 {
11309 /* SPSR is going to be changed. */
11310 /* How to read SPSR value? */
11311 printf_unfiltered (_("Process record does not support instruction "
11312 "0x%0x at address %s.\n"),
11313 arm_insn_r->arm_insn,
11314 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11315 return -1;
11316 }
11317 }
11318 else if (9 == arm_insn_r->decode
11319 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11320 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11321 {
11322 /* Handling SWP, SWPB. */
11323 /* These insn, changes register and memory as well. */
11324 /* SWP or SWPB insn. */
11325
11326 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11327 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11328 /* SWP insn ?, swaps word. */
11329 if (8 == arm_insn_r->opcode)
11330 {
11331 record_buf_mem[0] = 4;
11332 }
11333 else
11334 {
11335 /* SWPB insn, swaps only byte. */
11336 record_buf_mem[0] = 1;
11337 }
11338 record_buf_mem[1] = u_regval[0];
11339 arm_insn_r->mem_rec_count = 1;
11340 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11341 arm_insn_r->reg_rec_count = 1;
11342 }
11343 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11344 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11345 {
11346 /* Handle BLX, branch and link/exchange. */
11347 if (9 == arm_insn_r->opcode)
11348 {
11349 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11350 and R14 stores the return address. */
11351 record_buf[0] = ARM_PS_REGNUM;
11352 record_buf[1] = ARM_LR_REGNUM;
11353 arm_insn_r->reg_rec_count = 2;
11354 }
11355 }
11356 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11357 {
11358 /* Handle enhanced software breakpoint insn, BKPT. */
11359 /* CPSR is changed to be executed in ARM state, disabling normal
11360 interrupts, entering abort mode. */
11361 /* According to high vector configuration PC is set. */
11362 /* user hit breakpoint and type reverse, in
11363 that case, we need to go back with previous CPSR and
11364 Program Counter. */
11365 record_buf[0] = ARM_PS_REGNUM;
11366 record_buf[1] = ARM_LR_REGNUM;
11367 arm_insn_r->reg_rec_count = 2;
11368
11369 /* Save SPSR also; how? */
11370 printf_unfiltered (_("Process record does not support instruction "
11371 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11372 paddress (arm_insn_r->gdbarch,
11373 arm_insn_r->this_addr));
11374 return -1;
11375 }
11376 else if (11 == arm_insn_r->decode
11377 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11378 {
11379 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11380
11381 /* Handle str(x) insn */
11382 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11383 ARM_RECORD_STRH);
11384 }
11385 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11386 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11387 {
11388 /* Handle BX, branch and link/exchange. */
11389 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11390 record_buf[0] = ARM_PS_REGNUM;
11391 arm_insn_r->reg_rec_count = 1;
11392 }
11393 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11394 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11395 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11396 {
11397 /* Count leading zeros: CLZ. */
11398 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11399 arm_insn_r->reg_rec_count = 1;
11400 }
11401 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11402 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11403 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11404 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11405 )
11406 {
11407 /* Handle MRS insn. */
11408 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11409 arm_insn_r->reg_rec_count = 1;
11410 }
11411 else if (arm_insn_r->opcode <= 15)
11412 {
11413 /* Normal data processing insns. */
11414 /* Out of 11 shifter operands mode, all the insn modifies destination
11415 register, which is specified by 13-16 decode. */
11416 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11417 record_buf[1] = ARM_PS_REGNUM;
11418 arm_insn_r->reg_rec_count = 2;
11419 }
11420 else
11421 {
11422 return -1;
11423 }
11424
11425 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11426 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11427 return 0;
11428 }
11429
11430 /* Handling opcode 001 insns. */
11431
11432 static int
11433 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11434 {
11435 uint32_t record_buf[8], record_buf_mem[8];
11436
11437 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11438 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11439
11440 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11441 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11442 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11443 )
11444 {
11445 /* Handle MSR insn. */
11446 if (9 == arm_insn_r->opcode)
11447 {
11448 /* CSPR is going to be changed. */
11449 record_buf[0] = ARM_PS_REGNUM;
11450 arm_insn_r->reg_rec_count = 1;
11451 }
11452 else
11453 {
11454 /* SPSR is going to be changed. */
11455 }
11456 }
11457 else if (arm_insn_r->opcode <= 15)
11458 {
11459 /* Normal data processing insns. */
11460 /* Out of 11 shifter operands mode, all the insn modifies destination
11461 register, which is specified by 13-16 decode. */
11462 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11463 record_buf[1] = ARM_PS_REGNUM;
11464 arm_insn_r->reg_rec_count = 2;
11465 }
11466 else
11467 {
11468 return -1;
11469 }
11470
11471 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11472 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11473 return 0;
11474 }
11475
11476 /* Handle ARM mode instructions with opcode 010. */
11477
11478 static int
11479 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11480 {
11481 struct regcache *reg_cache = arm_insn_r->regcache;
11482
11483 uint32_t reg_base , reg_dest;
11484 uint32_t offset_12, tgt_mem_addr;
11485 uint32_t record_buf[8], record_buf_mem[8];
11486 unsigned char wback;
11487 ULONGEST u_regval;
11488
11489 /* Calculate wback. */
11490 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
11491 || (bit (arm_insn_r->arm_insn, 21) == 1);
11492
11493 arm_insn_r->reg_rec_count = 0;
11494 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11495
11496 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11497 {
11498 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11499 and LDRT. */
11500
11501 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11502 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
11503
11504 /* The LDR instruction is capable of doing branching. If MOV LR, PC
11505 preceeds a LDR instruction having R15 as reg_base, it
11506 emulates a branch and link instruction, and hence we need to save
11507 CPSR and PC as well. */
11508 if (ARM_PC_REGNUM == reg_dest)
11509 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11510
11511 /* If wback is true, also save the base register, which is going to be
11512 written to. */
11513 if (wback)
11514 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11515 }
11516 else
11517 {
11518 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11519
11520 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11521 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11522
11523 /* Handle bit U. */
11524 if (bit (arm_insn_r->arm_insn, 23))
11525 {
11526 /* U == 1: Add the offset. */
11527 tgt_mem_addr = (uint32_t) u_regval + offset_12;
11528 }
11529 else
11530 {
11531 /* U == 0: subtract the offset. */
11532 tgt_mem_addr = (uint32_t) u_regval - offset_12;
11533 }
11534
11535 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11536 bytes. */
11537 if (bit (arm_insn_r->arm_insn, 22))
11538 {
11539 /* STRB and STRBT: 1 byte. */
11540 record_buf_mem[0] = 1;
11541 }
11542 else
11543 {
11544 /* STR and STRT: 4 bytes. */
11545 record_buf_mem[0] = 4;
11546 }
11547
11548 /* Handle bit P. */
11549 if (bit (arm_insn_r->arm_insn, 24))
11550 record_buf_mem[1] = tgt_mem_addr;
11551 else
11552 record_buf_mem[1] = (uint32_t) u_regval;
11553
11554 arm_insn_r->mem_rec_count = 1;
11555
11556 /* If wback is true, also save the base register, which is going to be
11557 written to. */
11558 if (wback)
11559 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11560 }
11561
11562 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11563 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11564 return 0;
11565 }
11566
11567 /* Handling opcode 011 insns. */
11568
11569 static int
11570 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11571 {
11572 struct regcache *reg_cache = arm_insn_r->regcache;
11573
11574 uint32_t shift_imm = 0;
11575 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11576 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11577 uint32_t record_buf[8], record_buf_mem[8];
11578
11579 LONGEST s_word;
11580 ULONGEST u_regval[2];
11581
11582 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11583 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11584
11585 /* Handle enhanced store insns and LDRD DSP insn,
11586 order begins according to addressing modes for store insns
11587 STRH insn. */
11588
11589 /* LDR or STR? */
11590 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11591 {
11592 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11593 /* LDR insn has a capability to do branching, if
11594 MOV LR, PC is precedded by LDR insn having Rn as R15
11595 in that case, it emulates branch and link insn, and hence we
11596 need to save CSPR and PC as well. */
11597 if (15 != reg_dest)
11598 {
11599 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11600 arm_insn_r->reg_rec_count = 1;
11601 }
11602 else
11603 {
11604 record_buf[0] = reg_dest;
11605 record_buf[1] = ARM_PS_REGNUM;
11606 arm_insn_r->reg_rec_count = 2;
11607 }
11608 }
11609 else
11610 {
11611 if (! bits (arm_insn_r->arm_insn, 4, 11))
11612 {
11613 /* Store insn, register offset and register pre-indexed,
11614 register post-indexed. */
11615 /* Get Rm. */
11616 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11617 /* Get Rn. */
11618 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11619 regcache_raw_read_unsigned (reg_cache, reg_src1
11620 , &u_regval[0]);
11621 regcache_raw_read_unsigned (reg_cache, reg_src2
11622 , &u_regval[1]);
11623 if (15 == reg_src2)
11624 {
11625 /* If R15 was used as Rn, hence current PC+8. */
11626 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11627 u_regval[0] = u_regval[0] + 8;
11628 }
11629 /* Calculate target store address, Rn +/- Rm, register offset. */
11630 /* U == 1. */
11631 if (bit (arm_insn_r->arm_insn, 23))
11632 {
11633 tgt_mem_addr = u_regval[0] + u_regval[1];
11634 }
11635 else
11636 {
11637 tgt_mem_addr = u_regval[1] - u_regval[0];
11638 }
11639
11640 switch (arm_insn_r->opcode)
11641 {
11642 /* STR. */
11643 case 8:
11644 case 12:
11645 /* STR. */
11646 case 9:
11647 case 13:
11648 /* STRT. */
11649 case 1:
11650 case 5:
11651 /* STR. */
11652 case 0:
11653 case 4:
11654 record_buf_mem[0] = 4;
11655 break;
11656
11657 /* STRB. */
11658 case 10:
11659 case 14:
11660 /* STRB. */
11661 case 11:
11662 case 15:
11663 /* STRBT. */
11664 case 3:
11665 case 7:
11666 /* STRB. */
11667 case 2:
11668 case 6:
11669 record_buf_mem[0] = 1;
11670 break;
11671
11672 default:
11673 gdb_assert_not_reached ("no decoding pattern found");
11674 break;
11675 }
11676 record_buf_mem[1] = tgt_mem_addr;
11677 arm_insn_r->mem_rec_count = 1;
11678
11679 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11680 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11681 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11682 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11683 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11684 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11685 )
11686 {
11687 /* Rn is going to be changed in pre-indexed mode and
11688 post-indexed mode as well. */
11689 record_buf[0] = reg_src2;
11690 arm_insn_r->reg_rec_count = 1;
11691 }
11692 }
11693 else
11694 {
11695 /* Store insn, scaled register offset; scaled pre-indexed. */
11696 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11697 /* Get Rm. */
11698 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11699 /* Get Rn. */
11700 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11701 /* Get shift_imm. */
11702 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11703 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11704 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11705 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11706 /* Offset_12 used as shift. */
11707 switch (offset_12)
11708 {
11709 case 0:
11710 /* Offset_12 used as index. */
11711 offset_12 = u_regval[0] << shift_imm;
11712 break;
11713
11714 case 1:
11715 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
11716 break;
11717
11718 case 2:
11719 if (!shift_imm)
11720 {
11721 if (bit (u_regval[0], 31))
11722 {
11723 offset_12 = 0xFFFFFFFF;
11724 }
11725 else
11726 {
11727 offset_12 = 0;
11728 }
11729 }
11730 else
11731 {
11732 /* This is arithmetic shift. */
11733 offset_12 = s_word >> shift_imm;
11734 }
11735 break;
11736
11737 case 3:
11738 if (!shift_imm)
11739 {
11740 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11741 &u_regval[1]);
11742 /* Get C flag value and shift it by 31. */
11743 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11744 | (u_regval[0]) >> 1);
11745 }
11746 else
11747 {
11748 offset_12 = (u_regval[0] >> shift_imm) \
11749 | (u_regval[0] <<
11750 (sizeof(uint32_t) - shift_imm));
11751 }
11752 break;
11753
11754 default:
11755 gdb_assert_not_reached ("no decoding pattern found");
11756 break;
11757 }
11758
11759 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11760 /* bit U set. */
11761 if (bit (arm_insn_r->arm_insn, 23))
11762 {
11763 tgt_mem_addr = u_regval[1] + offset_12;
11764 }
11765 else
11766 {
11767 tgt_mem_addr = u_regval[1] - offset_12;
11768 }
11769
11770 switch (arm_insn_r->opcode)
11771 {
11772 /* STR. */
11773 case 8:
11774 case 12:
11775 /* STR. */
11776 case 9:
11777 case 13:
11778 /* STRT. */
11779 case 1:
11780 case 5:
11781 /* STR. */
11782 case 0:
11783 case 4:
11784 record_buf_mem[0] = 4;
11785 break;
11786
11787 /* STRB. */
11788 case 10:
11789 case 14:
11790 /* STRB. */
11791 case 11:
11792 case 15:
11793 /* STRBT. */
11794 case 3:
11795 case 7:
11796 /* STRB. */
11797 case 2:
11798 case 6:
11799 record_buf_mem[0] = 1;
11800 break;
11801
11802 default:
11803 gdb_assert_not_reached ("no decoding pattern found");
11804 break;
11805 }
11806 record_buf_mem[1] = tgt_mem_addr;
11807 arm_insn_r->mem_rec_count = 1;
11808
11809 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11810 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11811 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11812 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11813 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11814 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11815 )
11816 {
11817 /* Rn is going to be changed in register scaled pre-indexed
11818 mode,and scaled post indexed mode. */
11819 record_buf[0] = reg_src2;
11820 arm_insn_r->reg_rec_count = 1;
11821 }
11822 }
11823 }
11824
11825 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11826 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11827 return 0;
11828 }
11829
11830 /* Handle ARM mode instructions with opcode 100. */
11831
11832 static int
11833 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11834 {
11835 struct regcache *reg_cache = arm_insn_r->regcache;
11836 uint32_t register_count = 0, register_bits;
11837 uint32_t reg_base, addr_mode;
11838 uint32_t record_buf[24], record_buf_mem[48];
11839 uint32_t wback;
11840 ULONGEST u_regval;
11841
11842 /* Fetch the list of registers. */
11843 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11844 arm_insn_r->reg_rec_count = 0;
11845
11846 /* Fetch the base register that contains the address we are loading data
11847 to. */
11848 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11849
11850 /* Calculate wback. */
11851 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11852
11853 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11854 {
11855 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11856
11857 /* Find out which registers are going to be loaded from memory. */
11858 while (register_bits)
11859 {
11860 if (register_bits & 0x00000001)
11861 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11862 register_bits = register_bits >> 1;
11863 register_count++;
11864 }
11865
11866
11867 /* If wback is true, also save the base register, which is going to be
11868 written to. */
11869 if (wback)
11870 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11871
11872 /* Save the CPSR register. */
11873 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11874 }
11875 else
11876 {
11877 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11878
11879 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11880
11881 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11882
11883 /* Find out how many registers are going to be stored to memory. */
11884 while (register_bits)
11885 {
11886 if (register_bits & 0x00000001)
11887 register_count++;
11888 register_bits = register_bits >> 1;
11889 }
11890
11891 switch (addr_mode)
11892 {
11893 /* STMDA (STMED): Decrement after. */
11894 case 0:
11895 record_buf_mem[1] = (uint32_t) u_regval
11896 - register_count * INT_REGISTER_SIZE + 4;
11897 break;
11898 /* STM (STMIA, STMEA): Increment after. */
11899 case 1:
11900 record_buf_mem[1] = (uint32_t) u_regval;
11901 break;
11902 /* STMDB (STMFD): Decrement before. */
11903 case 2:
11904 record_buf_mem[1] = (uint32_t) u_regval
11905 - register_count * INT_REGISTER_SIZE;
11906 break;
11907 /* STMIB (STMFA): Increment before. */
11908 case 3:
11909 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11910 break;
11911 default:
11912 gdb_assert_not_reached ("no decoding pattern found");
11913 break;
11914 }
11915
11916 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11917 arm_insn_r->mem_rec_count = 1;
11918
11919 /* If wback is true, also save the base register, which is going to be
11920 written to. */
11921 if (wback)
11922 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11923 }
11924
11925 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11926 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11927 return 0;
11928 }
11929
11930 /* Handling opcode 101 insns. */
11931
11932 static int
11933 arm_record_b_bl (insn_decode_record *arm_insn_r)
11934 {
11935 uint32_t record_buf[8];
11936
11937 /* Handle B, BL, BLX(1) insns. */
11938 /* B simply branches so we do nothing here. */
11939 /* Note: BLX(1) doesnt fall here but instead it falls into
11940 extension space. */
11941 if (bit (arm_insn_r->arm_insn, 24))
11942 {
11943 record_buf[0] = ARM_LR_REGNUM;
11944 arm_insn_r->reg_rec_count = 1;
11945 }
11946
11947 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11948
11949 return 0;
11950 }
11951
/* Handling opcode 110 insns.  */

/* Fallback record handler for instruction classes process record does
   not support: print a diagnostic naming the opcode value and its
   address, then return -1 so the caller aborts recording of this
   instruction.  */

static int
arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
{
  printf_unfiltered (_("Process record does not support instruction "
                       "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
                       paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));

  return -1;
}
11963
/* Record handler for vector data transfer instructions — moves between
   ARM core registers and the VFP/NEON register file (VMOV, VMRS, VMSR,
   VDUP).  Records the register(s) about to be written in ARM_INSN_R;
   always returns 0.  */

static int
arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
{
  uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
  uint32_t record_buf[4];

  const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
  /* Rt: the ARM core register operand.  */
  reg_t = bits (arm_insn_r->arm_insn, 12, 15);
  /* NOTE(review): reg_v and bits_a are read from the same field
     (bits 21-23); whether the vector register number should instead
     come from the Vn/Vd fields is worth confirming against the
     ARM ARM.  */
  reg_v = bits (arm_insn_r->arm_insn, 21, 23);
  bits_a = bits (arm_insn_r->arm_insn, 21, 23);
  /* L bit: transfer direction (set => toward the core register).  */
  bit_l = bit (arm_insn_r->arm_insn, 20);
  bit_c = bit (arm_insn_r->arm_insn, 8);

  /* Handle VMOV instruction.  */
  if (bit_l && bit_c)
    {
      record_buf[0] = reg_t;
      arm_insn_r->reg_rec_count = 1;
    }
  else if (bit_l && !bit_c)
    {
      /* Handle VMOV instruction.  */
      if (bits_a == 0x00)
        {
          /* NOTE(review): bit 20 is necessarily set in this branch
             (bit_l is non-zero), so the else arm below is
             unreachable here.  */
          if (bit (arm_insn_r->arm_insn, 20))
            record_buf[0] = reg_t;
          else
            record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
                            (reg_v << 1));

          arm_insn_r->reg_rec_count = 1;
        }
      /* Handle VMRS instruction.  */
      else if (bits_a == 0x07)
        {
          /* VMRS with Rt == 15 writes the APSR flags instead of R15.  */
          if (reg_t == 15)
            reg_t = ARM_PS_REGNUM;

          record_buf[0] = reg_t;
          arm_insn_r->reg_rec_count = 1;
        }
    }
  else if (!bit_l && !bit_c)
    {
      /* Handle VMOV instruction.  */
      if (bits_a == 0x00)
        {
          /* NOTE(review): bit 20 is clear here (bit_l is zero), so
             the first arm is dead and the VFP register (offset by
             num_regs) is always the one recorded.  */
          if (bit (arm_insn_r->arm_insn, 20))
            record_buf[0] = reg_t;
          else
            record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
                            (reg_v << 1));

          arm_insn_r->reg_rec_count = 1;
        }
      /* Handle VMSR instruction.  */
      else if (bits_a == 0x07)
        {
          record_buf[0] = ARM_FPSCR_REGNUM;
          arm_insn_r->reg_rec_count = 1;
        }
    }
  else if (!bit_l && bit_c)
    {
      /* Handle VMOV instruction.  */
      if (!(bits_a & 0x04))
        {
          /* Core register to scalar: one D register is written.  */
          record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
                          + ARM_D0_REGNUM;
          arm_insn_r->reg_rec_count = 1;
        }
      /* Handle VDUP instruction.  */
      else
        {
          if (bit (arm_insn_r->arm_insn, 21))
            {
              /* Q variant: a quadword, i.e. two consecutive D
                 registers, is written.  */
              reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
              record_buf[0] = reg_v + ARM_D0_REGNUM;
              record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
              arm_insn_r->reg_rec_count = 2;
            }
          else
            {
              /* D variant: a single D register is written.  */
              reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
              record_buf[0] = reg_v + ARM_D0_REGNUM;
              arm_insn_r->reg_rec_count = 1;
            }
        }
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  return 0;
}
12059
12060 /* Record handler for extension register load/store instructions. */
12061
12062 static int
12063 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
12064 {
12065 uint32_t opcode, single_reg;
12066 uint8_t op_vldm_vstm;
12067 uint32_t record_buf[8], record_buf_mem[128];
12068 ULONGEST u_regval = 0;
12069
12070 struct regcache *reg_cache = arm_insn_r->regcache;
12071 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
12072
12073 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12074 single_reg = bit (arm_insn_r->arm_insn, 8);
12075 op_vldm_vstm = opcode & 0x1b;
12076
12077 /* Handle VMOV instructions. */
12078 if ((opcode & 0x1e) == 0x04)
12079 {
12080 if (bit (arm_insn_r->arm_insn, 4))
12081 {
12082 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12083 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12084 arm_insn_r->reg_rec_count = 2;
12085 }
12086 else
12087 {
12088 uint8_t reg_m = (bits (arm_insn_r->arm_insn, 0, 3) << 1)
12089 | bit (arm_insn_r->arm_insn, 5);
12090
12091 if (!single_reg)
12092 {
12093 record_buf[0] = num_regs + reg_m;
12094 record_buf[1] = num_regs + reg_m + 1;
12095 arm_insn_r->reg_rec_count = 2;
12096 }
12097 else
12098 {
12099 record_buf[0] = reg_m + ARM_D0_REGNUM;
12100 arm_insn_r->reg_rec_count = 1;
12101 }
12102 }
12103 }
12104 /* Handle VSTM and VPUSH instructions. */
12105 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12106 || op_vldm_vstm == 0x12)
12107 {
12108 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12109 uint32_t memory_index = 0;
12110
12111 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12112 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12113 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12114 imm_off32 = imm_off8 << 24;
12115 memory_count = imm_off8;
12116
12117 if (bit (arm_insn_r->arm_insn, 23))
12118 start_address = u_regval;
12119 else
12120 start_address = u_regval - imm_off32;
12121
12122 if (bit (arm_insn_r->arm_insn, 21))
12123 {
12124 record_buf[0] = reg_rn;
12125 arm_insn_r->reg_rec_count = 1;
12126 }
12127
12128 while (memory_count > 0)
12129 {
12130 if (!single_reg)
12131 {
12132 record_buf_mem[memory_index] = start_address;
12133 record_buf_mem[memory_index + 1] = 4;
12134 start_address = start_address + 4;
12135 memory_index = memory_index + 2;
12136 }
12137 else
12138 {
12139 record_buf_mem[memory_index] = start_address;
12140 record_buf_mem[memory_index + 1] = 4;
12141 record_buf_mem[memory_index + 2] = start_address + 4;
12142 record_buf_mem[memory_index + 3] = 4;
12143 start_address = start_address + 8;
12144 memory_index = memory_index + 4;
12145 }
12146 memory_count--;
12147 }
12148 arm_insn_r->mem_rec_count = (memory_index >> 1);
12149 }
12150 /* Handle VLDM instructions. */
12151 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12152 || op_vldm_vstm == 0x13)
12153 {
12154 uint32_t reg_count, reg_vd;
12155 uint32_t reg_index = 0;
12156
12157 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12158 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12159
12160 if (single_reg)
12161 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12162 else
12163 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12164
12165 if (bit (arm_insn_r->arm_insn, 21))
12166 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12167
12168 while (reg_count > 0)
12169 {
12170 if (single_reg)
12171 record_buf[reg_index++] = num_regs + reg_vd + reg_count - 1;
12172 else
12173 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12174
12175 reg_count--;
12176 }
12177 arm_insn_r->reg_rec_count = reg_index;
12178 }
12179 /* VSTR Vector store register. */
12180 else if ((opcode & 0x13) == 0x10)
12181 {
12182 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12183 uint32_t memory_index = 0;
12184
12185 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12186 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12187 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12188 imm_off32 = imm_off8 << 24;
12189 memory_count = imm_off8;
12190
12191 if (bit (arm_insn_r->arm_insn, 23))
12192 start_address = u_regval + imm_off32;
12193 else
12194 start_address = u_regval - imm_off32;
12195
12196 if (single_reg)
12197 {
12198 record_buf_mem[memory_index] = start_address;
12199 record_buf_mem[memory_index + 1] = 4;
12200 arm_insn_r->mem_rec_count = 1;
12201 }
12202 else
12203 {
12204 record_buf_mem[memory_index] = start_address;
12205 record_buf_mem[memory_index + 1] = 4;
12206 record_buf_mem[memory_index + 2] = start_address + 4;
12207 record_buf_mem[memory_index + 3] = 4;
12208 arm_insn_r->mem_rec_count = 2;
12209 }
12210 }
12211 /* VLDR Vector load register. */
12212 else if ((opcode & 0x13) == 0x11)
12213 {
12214 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12215
12216 if (!single_reg)
12217 {
12218 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12219 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12220 }
12221 else
12222 {
12223 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12224 record_buf[0] = num_regs + reg_vd;
12225 }
12226 arm_insn_r->reg_rec_count = 1;
12227 }
12228
12229 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12230 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12231 return 0;
12232 }
12233
12234 /* Record handler for arm/thumb mode VFP data processing instructions. */
12235
12236 static int
12237 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
12238 {
12239 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12240 uint32_t record_buf[4];
12241 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12242 enum insn_types curr_insn_type = INSN_INV;
12243
12244 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12245 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12246 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12247 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12248 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12249 bit_d = bit (arm_insn_r->arm_insn, 22);
12250 opc1 = opc1 & 0x04;
12251
12252 /* Handle VMLA, VMLS. */
12253 if (opc1 == 0x00)
12254 {
12255 if (bit (arm_insn_r->arm_insn, 10))
12256 {
12257 if (bit (arm_insn_r->arm_insn, 6))
12258 curr_insn_type = INSN_T0;
12259 else
12260 curr_insn_type = INSN_T1;
12261 }
12262 else
12263 {
12264 if (dp_op_sz)
12265 curr_insn_type = INSN_T1;
12266 else
12267 curr_insn_type = INSN_T2;
12268 }
12269 }
12270 /* Handle VNMLA, VNMLS, VNMUL. */
12271 else if (opc1 == 0x01)
12272 {
12273 if (dp_op_sz)
12274 curr_insn_type = INSN_T1;
12275 else
12276 curr_insn_type = INSN_T2;
12277 }
12278 /* Handle VMUL. */
12279 else if (opc1 == 0x02 && !(opc3 & 0x01))
12280 {
12281 if (bit (arm_insn_r->arm_insn, 10))
12282 {
12283 if (bit (arm_insn_r->arm_insn, 6))
12284 curr_insn_type = INSN_T0;
12285 else
12286 curr_insn_type = INSN_T1;
12287 }
12288 else
12289 {
12290 if (dp_op_sz)
12291 curr_insn_type = INSN_T1;
12292 else
12293 curr_insn_type = INSN_T2;
12294 }
12295 }
12296 /* Handle VADD, VSUB. */
12297 else if (opc1 == 0x03)
12298 {
12299 if (!bit (arm_insn_r->arm_insn, 9))
12300 {
12301 if (bit (arm_insn_r->arm_insn, 6))
12302 curr_insn_type = INSN_T0;
12303 else
12304 curr_insn_type = INSN_T1;
12305 }
12306 else
12307 {
12308 if (dp_op_sz)
12309 curr_insn_type = INSN_T1;
12310 else
12311 curr_insn_type = INSN_T2;
12312 }
12313 }
12314 /* Handle VDIV. */
12315 else if (opc1 == 0x0b)
12316 {
12317 if (dp_op_sz)
12318 curr_insn_type = INSN_T1;
12319 else
12320 curr_insn_type = INSN_T2;
12321 }
12322 /* Handle all other vfp data processing instructions. */
12323 else if (opc1 == 0x0b)
12324 {
12325 /* Handle VMOV. */
12326 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12327 {
12328 if (bit (arm_insn_r->arm_insn, 4))
12329 {
12330 if (bit (arm_insn_r->arm_insn, 6))
12331 curr_insn_type = INSN_T0;
12332 else
12333 curr_insn_type = INSN_T1;
12334 }
12335 else
12336 {
12337 if (dp_op_sz)
12338 curr_insn_type = INSN_T1;
12339 else
12340 curr_insn_type = INSN_T2;
12341 }
12342 }
12343 /* Handle VNEG and VABS. */
12344 else if ((opc2 == 0x01 && opc3 == 0x01)
12345 || (opc2 == 0x00 && opc3 == 0x03))
12346 {
12347 if (!bit (arm_insn_r->arm_insn, 11))
12348 {
12349 if (bit (arm_insn_r->arm_insn, 6))
12350 curr_insn_type = INSN_T0;
12351 else
12352 curr_insn_type = INSN_T1;
12353 }
12354 else
12355 {
12356 if (dp_op_sz)
12357 curr_insn_type = INSN_T1;
12358 else
12359 curr_insn_type = INSN_T2;
12360 }
12361 }
12362 /* Handle VSQRT. */
12363 else if (opc2 == 0x01 && opc3 == 0x03)
12364 {
12365 if (dp_op_sz)
12366 curr_insn_type = INSN_T1;
12367 else
12368 curr_insn_type = INSN_T2;
12369 }
12370 /* Handle VCVT. */
12371 else if (opc2 == 0x07 && opc3 == 0x03)
12372 {
12373 if (!dp_op_sz)
12374 curr_insn_type = INSN_T1;
12375 else
12376 curr_insn_type = INSN_T2;
12377 }
12378 else if (opc3 & 0x01)
12379 {
12380 /* Handle VCVT. */
12381 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12382 {
12383 if (!bit (arm_insn_r->arm_insn, 18))
12384 curr_insn_type = INSN_T2;
12385 else
12386 {
12387 if (dp_op_sz)
12388 curr_insn_type = INSN_T1;
12389 else
12390 curr_insn_type = INSN_T2;
12391 }
12392 }
12393 /* Handle VCVT. */
12394 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12395 {
12396 if (dp_op_sz)
12397 curr_insn_type = INSN_T1;
12398 else
12399 curr_insn_type = INSN_T2;
12400 }
12401 /* Handle VCVTB, VCVTT. */
12402 else if ((opc2 & 0x0e) == 0x02)
12403 curr_insn_type = INSN_T2;
12404 /* Handle VCMP, VCMPE. */
12405 else if ((opc2 & 0x0e) == 0x04)
12406 curr_insn_type = INSN_T3;
12407 }
12408 }
12409
12410 switch (curr_insn_type)
12411 {
12412 case INSN_T0:
12413 reg_vd = reg_vd | (bit_d << 4);
12414 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12415 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12416 arm_insn_r->reg_rec_count = 2;
12417 break;
12418
12419 case INSN_T1:
12420 reg_vd = reg_vd | (bit_d << 4);
12421 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12422 arm_insn_r->reg_rec_count = 1;
12423 break;
12424
12425 case INSN_T2:
12426 reg_vd = (reg_vd << 1) | bit_d;
12427 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12428 arm_insn_r->reg_rec_count = 1;
12429 break;
12430
12431 case INSN_T3:
12432 record_buf[0] = ARM_FPSCR_REGNUM;
12433 arm_insn_r->reg_rec_count = 1;
12434 break;
12435
12436 default:
12437 gdb_assert_not_reached ("no decoding pattern found");
12438 break;
12439 }
12440
12441 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12442 return 0;
12443 }
12444
12445 /* Handling opcode 110 insns. */
12446
12447 static int
12448 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
12449 {
12450 uint32_t op, op1, op1_sbit, op1_ebit, coproc;
12451
12452 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12453 op1 = bits (arm_insn_r->arm_insn, 20, 25);
12454 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12455
12456 if ((coproc & 0x0e) == 0x0a)
12457 {
12458 /* Handle extension register ld/st instructions. */
12459 if (!(op1 & 0x20))
12460 return arm_record_exreg_ld_st_insn (arm_insn_r);
12461
12462 /* 64-bit transfers between arm core and extension registers. */
12463 if ((op1 & 0x3e) == 0x04)
12464 return arm_record_exreg_ld_st_insn (arm_insn_r);
12465 }
12466 else
12467 {
12468 /* Handle coprocessor ld/st instructions. */
12469 if (!(op1 & 0x3a))
12470 {
12471 /* Store. */
12472 if (!op1_ebit)
12473 return arm_record_unsupported_insn (arm_insn_r);
12474 else
12475 /* Load. */
12476 return arm_record_unsupported_insn (arm_insn_r);
12477 }
12478
12479 /* Move to coprocessor from two arm core registers. */
12480 if (op1 == 0x4)
12481 return arm_record_unsupported_insn (arm_insn_r);
12482
12483 /* Move to two arm core registers from coprocessor. */
12484 if (op1 == 0x5)
12485 {
12486 uint32_t reg_t[2];
12487
12488 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
12489 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
12490 arm_insn_r->reg_rec_count = 2;
12491
12492 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
12493 return 0;
12494 }
12495 }
12496 return arm_record_unsupported_insn (arm_insn_r);
12497 }
12498
12499 /* Handling opcode 111 insns. */
12500
12501 static int
12502 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
12503 {
12504 uint32_t op, op1_sbit, op1_ebit, coproc;
12505 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
12506 struct regcache *reg_cache = arm_insn_r->regcache;
12507 ULONGEST u_regval = 0;
12508
12509 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12510 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12511 op1_sbit = bit (arm_insn_r->arm_insn, 24);
12512 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12513 op = bit (arm_insn_r->arm_insn, 4);
12514
12515 /* Handle arm SWI/SVC system call instructions. */
12516 if (op1_sbit)
12517 {
12518 if (tdep->arm_syscall_record != NULL)
12519 {
12520 ULONGEST svc_operand, svc_number;
12521
12522 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12523
12524 if (svc_operand) /* OABI. */
12525 svc_number = svc_operand - 0x900000;
12526 else /* EABI. */
12527 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12528
12529 return tdep->arm_syscall_record (reg_cache, svc_number);
12530 }
12531 else
12532 {
12533 printf_unfiltered (_("no syscall record support\n"));
12534 return -1;
12535 }
12536 }
12537
12538 if ((coproc & 0x0e) == 0x0a)
12539 {
12540 /* VFP data-processing instructions. */
12541 if (!op1_sbit && !op)
12542 return arm_record_vfp_data_proc_insn (arm_insn_r);
12543
12544 /* Advanced SIMD, VFP instructions. */
12545 if (!op1_sbit && op)
12546 return arm_record_vdata_transfer_insn (arm_insn_r);
12547 }
12548 else
12549 {
12550 /* Coprocessor data operations. */
12551 if (!op1_sbit && !op)
12552 return arm_record_unsupported_insn (arm_insn_r);
12553
12554 /* Move to Coprocessor from ARM core register. */
12555 if (!op1_sbit && !op1_ebit && op)
12556 return arm_record_unsupported_insn (arm_insn_r);
12557
12558 /* Move to arm core register from coprocessor. */
12559 if (!op1_sbit && op1_ebit && op)
12560 {
12561 uint32_t record_buf[1];
12562
12563 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12564 if (record_buf[0] == 15)
12565 record_buf[0] = ARM_PS_REGNUM;
12566
12567 arm_insn_r->reg_rec_count = 1;
12568 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
12569 record_buf);
12570 return 0;
12571 }
12572 }
12573
12574 return arm_record_unsupported_insn (arm_insn_r);
12575 }
12576
12577 /* Handling opcode 000 insns. */
12578
12579 static int
12580 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12581 {
12582 uint32_t record_buf[8];
12583 uint32_t reg_src1 = 0;
12584
12585 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12586
12587 record_buf[0] = ARM_PS_REGNUM;
12588 record_buf[1] = reg_src1;
12589 thumb_insn_r->reg_rec_count = 2;
12590
12591 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12592
12593 return 0;
12594 }
12595
12596
12597 /* Handling opcode 001 insns. */
12598
12599 static int
12600 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12601 {
12602 uint32_t record_buf[8];
12603 uint32_t reg_src1 = 0;
12604
12605 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12606
12607 record_buf[0] = ARM_PS_REGNUM;
12608 record_buf[1] = reg_src1;
12609 thumb_insn_r->reg_rec_count = 2;
12610
12611 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12612
12613 return 0;
12614 }
12615
12616 /* Handling opcode 010 insns. */
12617
12618 static int
12619 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12620 {
12621 struct regcache *reg_cache = thumb_insn_r->regcache;
12622 uint32_t record_buf[8], record_buf_mem[8];
12623
12624 uint32_t reg_src1 = 0, reg_src2 = 0;
12625 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12626
12627 ULONGEST u_regval[2] = {0};
12628
12629 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12630
12631 if (bit (thumb_insn_r->arm_insn, 12))
12632 {
12633 /* Handle load/store register offset. */
12634 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
12635 if (opcode2 >= 12 && opcode2 <= 15)
12636 {
12637 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12638 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
12639 record_buf[0] = reg_src1;
12640 thumb_insn_r->reg_rec_count = 1;
12641 }
12642 else if (opcode2 >= 8 && opcode2 <= 10)
12643 {
12644 /* STR(2), STRB(2), STRH(2) . */
12645 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12646 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12647 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12648 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12649 if (8 == opcode2)
12650 record_buf_mem[0] = 4; /* STR (2). */
12651 else if (10 == opcode2)
12652 record_buf_mem[0] = 1; /* STRB (2). */
12653 else if (9 == opcode2)
12654 record_buf_mem[0] = 2; /* STRH (2). */
12655 record_buf_mem[1] = u_regval[0] + u_regval[1];
12656 thumb_insn_r->mem_rec_count = 1;
12657 }
12658 }
12659 else if (bit (thumb_insn_r->arm_insn, 11))
12660 {
12661 /* Handle load from literal pool. */
12662 /* LDR(3). */
12663 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12664 record_buf[0] = reg_src1;
12665 thumb_insn_r->reg_rec_count = 1;
12666 }
12667 else if (opcode1)
12668 {
12669 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12670 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12671 if ((3 == opcode2) && (!opcode3))
12672 {
12673 /* Branch with exchange. */
12674 record_buf[0] = ARM_PS_REGNUM;
12675 thumb_insn_r->reg_rec_count = 1;
12676 }
12677 else
12678 {
12679 /* Format 8; special data processing insns. */
12680 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12681 record_buf[0] = ARM_PS_REGNUM;
12682 record_buf[1] = reg_src1;
12683 thumb_insn_r->reg_rec_count = 2;
12684 }
12685 }
12686 else
12687 {
12688 /* Format 5; data processing insns. */
12689 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12690 if (bit (thumb_insn_r->arm_insn, 7))
12691 {
12692 reg_src1 = reg_src1 + 8;
12693 }
12694 record_buf[0] = ARM_PS_REGNUM;
12695 record_buf[1] = reg_src1;
12696 thumb_insn_r->reg_rec_count = 2;
12697 }
12698
12699 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12700 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12701 record_buf_mem);
12702
12703 return 0;
12704 }
12705
12706 /* Handling opcode 001 insns. */
12707
12708 static int
12709 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12710 {
12711 struct regcache *reg_cache = thumb_insn_r->regcache;
12712 uint32_t record_buf[8], record_buf_mem[8];
12713
12714 uint32_t reg_src1 = 0;
12715 uint32_t opcode = 0, immed_5 = 0;
12716
12717 ULONGEST u_regval = 0;
12718
12719 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12720
12721 if (opcode)
12722 {
12723 /* LDR(1). */
12724 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12725 record_buf[0] = reg_src1;
12726 thumb_insn_r->reg_rec_count = 1;
12727 }
12728 else
12729 {
12730 /* STR(1). */
12731 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12732 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12733 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12734 record_buf_mem[0] = 4;
12735 record_buf_mem[1] = u_regval + (immed_5 * 4);
12736 thumb_insn_r->mem_rec_count = 1;
12737 }
12738
12739 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12740 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12741 record_buf_mem);
12742
12743 return 0;
12744 }
12745
12746 /* Handling opcode 100 insns. */
12747
12748 static int
12749 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12750 {
12751 struct regcache *reg_cache = thumb_insn_r->regcache;
12752 uint32_t record_buf[8], record_buf_mem[8];
12753
12754 uint32_t reg_src1 = 0;
12755 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12756
12757 ULONGEST u_regval = 0;
12758
12759 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12760
12761 if (3 == opcode)
12762 {
12763 /* LDR(4). */
12764 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12765 record_buf[0] = reg_src1;
12766 thumb_insn_r->reg_rec_count = 1;
12767 }
12768 else if (1 == opcode)
12769 {
12770 /* LDRH(1). */
12771 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12772 record_buf[0] = reg_src1;
12773 thumb_insn_r->reg_rec_count = 1;
12774 }
12775 else if (2 == opcode)
12776 {
12777 /* STR(3). */
12778 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12779 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12780 record_buf_mem[0] = 4;
12781 record_buf_mem[1] = u_regval + (immed_8 * 4);
12782 thumb_insn_r->mem_rec_count = 1;
12783 }
12784 else if (0 == opcode)
12785 {
12786 /* STRH(1). */
12787 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12788 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12789 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12790 record_buf_mem[0] = 2;
12791 record_buf_mem[1] = u_regval + (immed_5 * 2);
12792 thumb_insn_r->mem_rec_count = 1;
12793 }
12794
12795 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12796 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12797 record_buf_mem);
12798
12799 return 0;
12800 }
12801
12802 /* Handling opcode 101 insns. */
12803
12804 static int
12805 thumb_record_misc (insn_decode_record *thumb_insn_r)
12806 {
12807 struct regcache *reg_cache = thumb_insn_r->regcache;
12808
12809 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12810 uint32_t register_bits = 0, register_count = 0;
12811 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12812 uint32_t record_buf[24], record_buf_mem[48];
12813 uint32_t reg_src1;
12814
12815 ULONGEST u_regval = 0;
12816
12817 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12818 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12819 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12820
12821 if (14 == opcode2)
12822 {
12823 /* POP. */
12824 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12825 while (register_bits)
12826 {
12827 if (register_bits & 0x00000001)
12828 record_buf[index++] = register_count;
12829 register_bits = register_bits >> 1;
12830 register_count++;
12831 }
12832 record_buf[index++] = ARM_PS_REGNUM;
12833 record_buf[index++] = ARM_SP_REGNUM;
12834 thumb_insn_r->reg_rec_count = index;
12835 }
12836 else if (10 == opcode2)
12837 {
12838 /* PUSH. */
12839 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12840 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12841 while (register_bits)
12842 {
12843 if (register_bits & 0x00000001)
12844 register_count++;
12845 register_bits = register_bits >> 1;
12846 }
12847 start_address = u_regval - \
12848 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12849 thumb_insn_r->mem_rec_count = register_count;
12850 while (register_count)
12851 {
12852 record_buf_mem[(register_count * 2) - 1] = start_address;
12853 record_buf_mem[(register_count * 2) - 2] = 4;
12854 start_address = start_address + 4;
12855 register_count--;
12856 }
12857 record_buf[0] = ARM_SP_REGNUM;
12858 thumb_insn_r->reg_rec_count = 1;
12859 }
12860 else if (0x1E == opcode1)
12861 {
12862 /* BKPT insn. */
12863 /* Handle enhanced software breakpoint insn, BKPT. */
12864 /* CPSR is changed to be executed in ARM state, disabling normal
12865 interrupts, entering abort mode. */
12866 /* According to high vector configuration PC is set. */
12867 /* User hits breakpoint and type reverse, in that case, we need to go back with
12868 previous CPSR and Program Counter. */
12869 record_buf[0] = ARM_PS_REGNUM;
12870 record_buf[1] = ARM_LR_REGNUM;
12871 thumb_insn_r->reg_rec_count = 2;
12872 /* We need to save SPSR value, which is not yet done. */
12873 printf_unfiltered (_("Process record does not support instruction "
12874 "0x%0x at address %s.\n"),
12875 thumb_insn_r->arm_insn,
12876 paddress (thumb_insn_r->gdbarch,
12877 thumb_insn_r->this_addr));
12878 return -1;
12879 }
12880 else if ((0 == opcode) || (1 == opcode))
12881 {
12882 /* ADD(5), ADD(6). */
12883 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12884 record_buf[0] = reg_src1;
12885 thumb_insn_r->reg_rec_count = 1;
12886 }
12887 else if (2 == opcode)
12888 {
12889 /* ADD(7), SUB(4). */
12890 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12891 record_buf[0] = ARM_SP_REGNUM;
12892 thumb_insn_r->reg_rec_count = 1;
12893 }
12894
12895 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12896 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12897 record_buf_mem);
12898
12899 return 0;
12900 }
12901
12902 /* Handling opcode 110 insns. */
12903
12904 static int
12905 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12906 {
12907 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12908 struct regcache *reg_cache = thumb_insn_r->regcache;
12909
12910 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12911 uint32_t reg_src1 = 0;
12912 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12913 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12914 uint32_t record_buf[24], record_buf_mem[48];
12915
12916 ULONGEST u_regval = 0;
12917
12918 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12919 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12920
12921 if (1 == opcode2)
12922 {
12923
12924 /* LDMIA. */
12925 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12926 /* Get Rn. */
12927 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12928 while (register_bits)
12929 {
12930 if (register_bits & 0x00000001)
12931 record_buf[index++] = register_count;
12932 register_bits = register_bits >> 1;
12933 register_count++;
12934 }
12935 record_buf[index++] = reg_src1;
12936 thumb_insn_r->reg_rec_count = index;
12937 }
12938 else if (0 == opcode2)
12939 {
12940 /* It handles both STMIA. */
12941 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12942 /* Get Rn. */
12943 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12944 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12945 while (register_bits)
12946 {
12947 if (register_bits & 0x00000001)
12948 register_count++;
12949 register_bits = register_bits >> 1;
12950 }
12951 start_address = u_regval;
12952 thumb_insn_r->mem_rec_count = register_count;
12953 while (register_count)
12954 {
12955 record_buf_mem[(register_count * 2) - 1] = start_address;
12956 record_buf_mem[(register_count * 2) - 2] = 4;
12957 start_address = start_address + 4;
12958 register_count--;
12959 }
12960 }
12961 else if (0x1F == opcode1)
12962 {
12963 /* Handle arm syscall insn. */
12964 if (tdep->arm_syscall_record != NULL)
12965 {
12966 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12967 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12968 }
12969 else
12970 {
12971 printf_unfiltered (_("no syscall record support\n"));
12972 return -1;
12973 }
12974 }
12975
12976 /* B (1), conditional branch is automatically taken care in process_record,
12977 as PC is saved there. */
12978
12979 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12980 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12981 record_buf_mem);
12982
12983 return ret;
12984 }
12985
12986 /* Handling opcode 111 insns. */
12987
12988 static int
12989 thumb_record_branch (insn_decode_record *thumb_insn_r)
12990 {
12991 uint32_t record_buf[8];
12992 uint32_t bits_h = 0;
12993
12994 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12995
12996 if (2 == bits_h || 3 == bits_h)
12997 {
12998 /* BL */
12999 record_buf[0] = ARM_LR_REGNUM;
13000 thumb_insn_r->reg_rec_count = 1;
13001 }
13002 else if (1 == bits_h)
13003 {
13004 /* BLX(1). */
13005 record_buf[0] = ARM_PS_REGNUM;
13006 record_buf[1] = ARM_LR_REGNUM;
13007 thumb_insn_r->reg_rec_count = 2;
13008 }
13009
13010 /* B(2) is automatically taken care in process_record, as PC is
13011 saved there. */
13012
13013 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13014
13015 return 0;
13016 }
13017
/* Handler for thumb2 load/store multiple instructions (LDM/STM and
   their DB variants, plus RFE/SRS).  Records the registers a load
   multiple clobbers, or the memory words a store multiple overwrites.
   Returns ARM_RECORD_SUCCESS, or defers to the unsupported handler.  */

static int
thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, op;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  op = bits (thumb2_insn_r->arm_insn, 23, 24);

  if (0 == op || 3 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle RFE instruction: restores CPSR from memory.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* Handle SRS instruction after reading banked SP.  */
	  return arm_record_unsupported_insn (thumb2_insn_r);
	}
    }
  else if (1 == op || 2 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions: the base
	     register, CPSR, and every listed register are clobbered.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;

	      register_count++;
	      register_bits = register_bits >> 1;
	    }
	  record_buf[index++] = reg_rn;
	  record_buf[index++] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = index;
	}
      else
	{
	  /* Handle STM/STMIA/STMEA and STMDB/STMFD: count the listed
	     registers, then record the memory words about to be
	     overwritten.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;

	      register_bits = register_bits >> 1;
	    }

	  if (1 == op)
	    {
	      /* Increment-after form (STM/STMIA/STMEA): the first store
		 goes to [Rn].  (The original comment wrongly said
		 "LDMDB/LDMEA" here.)  */
	      start_address = u_regval;
	    }
	  else if (2 == op)
	    {
	      /* Decrement-before form (STMDB/STMFD): the stores begin at
		 Rn - 4 * register_count.  */
	      start_address = u_regval - register_count * 4;
	    }

	  thumb2_insn_r->mem_rec_count = register_count;
	  while (register_count)
	    {
	      record_buf_mem[register_count * 2 - 1] = start_address;
	      record_buf_mem[register_count * 2 - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  record_buf[0] = reg_rn;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}
    }

  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  return ARM_RECORD_SUCCESS;
}
13111
/* Handler for thumb2 load/store (dual/exclusive) and table branch
   instructions.  Records clobbered destination registers for the load
   forms, and the memory about to be overwritten for the store forms.
   Returns ARM_RECORD_SUCCESS.  */

static int
thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rd, reg_rn, offset_imm;
  uint32_t reg_dest1, reg_dest2;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2, op3;
  LONGEST s_word;  /* NOTE(review): unused in this function.  */

  ULONGEST u_regval[2];

  op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
  op3 = bits (thumb2_insn_r->arm_insn, 4, 7);

  if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* Load forms.  The excluded op1/op2/op3 combination appears to be
	 the TBB/TBH table-branch encodings, which write no Rt —
	 TODO confirm against the Thumb-2 encoding table.  */
      if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
	{
	  /* Rt (bits 12..15) and the flags are clobbered.  */
	  reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
	  record_buf[0] = reg_dest1;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}

      /* Dual/exclusive variants also clobber a second destination,
	 Rt2/Rd in bits 8..11.  */
      if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
	{
	  reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
	  record_buf[2] = reg_dest2;
	  thumb2_insn_r->reg_rec_count = 3;
	}
    }
  else
    {
      /* Store forms: read the base register Rn once up front.  */
      reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

      if (0 == op1 && 0 == op2)
	{
	  /* Handle STREX: one word at [Rn + imm8*4] is overwritten, and
	     the status register Rd (bits 0..3) receives 0/1.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
	  address = u_regval[0] + (offset_imm * 4);
	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = address;
	  thumb2_insn_r->mem_rec_count = 1;
	  reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
	  record_buf[0] = reg_rd;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else if (1 == op1 && 0 == op2)
	{
	  /* STREXB/STREXH/STREXD: status register Rd plus the memory at
	     [Rn]; only the access width differs per op3.  */
	  reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
	  record_buf[0] = reg_rd;
	  thumb2_insn_r->reg_rec_count = 1;
	  address = u_regval[0];
	  record_buf_mem[1] = address;

	  if (4 == op3)
	    {
	      /* Handle STREXB: one byte.  */
	      record_buf_mem[0] = 1;
	      thumb2_insn_r->mem_rec_count = 1;
	    }
	  else if (5 == op3)
	    {
	      /* Handle STREXH: one halfword.  */
	      record_buf_mem[0] = 2 ;
	      thumb2_insn_r->mem_rec_count = 1;
	    }
	  else if (7 == op3)
	    {
	      /* Handle STREXD: two consecutive words.  */
	      address = u_regval[0];
	      record_buf_mem[0] = 4;
	      record_buf_mem[2] = 4;
	      record_buf_mem[3] = address + 4;
	      thumb2_insn_r->mem_rec_count = 2;
	    }
	}
      else
	{
	  /* STRD: two words at Rn +/- imm8*4 (pre-indexed when bit 24 is
	     set, otherwise at [Rn]); writeback clobbers Rn.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);

	  if (bit (thumb2_insn_r->arm_insn, 24))
	    {
	      /* Bit 23 is the U (add/subtract offset) flag.  */
	      if (bit (thumb2_insn_r->arm_insn, 23))
		offset_addr = u_regval[0] + (offset_imm * 4);
	      else
		offset_addr = u_regval[0] - (offset_imm * 4);

	      address = offset_addr;
	    }
	  else
	    address = u_regval[0];

	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = address;
	  record_buf_mem[2] = 4;
	  record_buf_mem[3] = address + 4;
	  thumb2_insn_r->mem_rec_count = 2;
	  record_buf[0] = reg_rn;
	  thumb2_insn_r->reg_rec_count = 1;
	}
    }

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
13229
13230 /* Handler for thumb2 data processing (shift register and modified immediate)
13231 instructions. */
13232
13233 static int
13234 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
13235 {
13236 uint32_t reg_rd, op;
13237 uint32_t record_buf[8];
13238
13239 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13240 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13241
13242 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13243 {
13244 record_buf[0] = ARM_PS_REGNUM;
13245 thumb2_insn_r->reg_rec_count = 1;
13246 }
13247 else
13248 {
13249 record_buf[0] = reg_rd;
13250 record_buf[1] = ARM_PS_REGNUM;
13251 thumb2_insn_r->reg_rec_count = 2;
13252 }
13253
13254 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13255 record_buf);
13256 return ARM_RECORD_SUCCESS;
13257 }
13258
13259 /* Generic handler for thumb2 instructions which effect destination and PS
13260 registers. */
13261
13262 static int
13263 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
13264 {
13265 uint32_t reg_rd;
13266 uint32_t record_buf[8];
13267
13268 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13269
13270 record_buf[0] = reg_rd;
13271 record_buf[1] = ARM_PS_REGNUM;
13272 thumb2_insn_r->reg_rec_count = 2;
13273
13274 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13275 record_buf);
13276 return ARM_RECORD_SUCCESS;
13277 }
13278
13279 /* Handler for thumb2 branch and miscellaneous control instructions. */
13280
13281 static int
13282 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
13283 {
13284 uint32_t op, op1, op2;
13285 uint32_t record_buf[8];
13286
13287 op = bits (thumb2_insn_r->arm_insn, 20, 26);
13288 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
13289 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13290
13291 /* Handle MSR insn. */
13292 if (!(op1 & 0x2) && 0x38 == op)
13293 {
13294 if (!(op2 & 0x3))
13295 {
13296 /* CPSR is going to be changed. */
13297 record_buf[0] = ARM_PS_REGNUM;
13298 thumb2_insn_r->reg_rec_count = 1;
13299 }
13300 else
13301 {
13302 arm_record_unsupported_insn(thumb2_insn_r);
13303 return -1;
13304 }
13305 }
13306 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
13307 {
13308 /* BLX. */
13309 record_buf[0] = ARM_PS_REGNUM;
13310 record_buf[1] = ARM_LR_REGNUM;
13311 thumb2_insn_r->reg_rec_count = 2;
13312 }
13313
13314 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13315 record_buf);
13316 return ARM_RECORD_SUCCESS;
13317 }
13318
13319 /* Handler for thumb2 store single data item instructions. */
13320
13321 static int
13322 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
13323 {
13324 struct regcache *reg_cache = thumb2_insn_r->regcache;
13325
13326 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
13327 uint32_t address, offset_addr;
13328 uint32_t record_buf[8], record_buf_mem[8];
13329 uint32_t op1, op2;
13330
13331 ULONGEST u_regval[2];
13332
13333 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
13334 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
13335 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13336 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13337
13338 if (bit (thumb2_insn_r->arm_insn, 23))
13339 {
13340 /* T2 encoding. */
13341 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
13342 offset_addr = u_regval[0] + offset_imm;
13343 address = offset_addr;
13344 }
13345 else
13346 {
13347 /* T3 encoding. */
13348 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
13349 {
13350 /* Handle STRB (register). */
13351 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
13352 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
13353 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
13354 offset_addr = u_regval[1] << shift_imm;
13355 address = u_regval[0] + offset_addr;
13356 }
13357 else
13358 {
13359 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13360 if (bit (thumb2_insn_r->arm_insn, 10))
13361 {
13362 if (bit (thumb2_insn_r->arm_insn, 9))
13363 offset_addr = u_regval[0] + offset_imm;
13364 else
13365 offset_addr = u_regval[0] - offset_imm;
13366
13367 address = offset_addr;
13368 }
13369 else
13370 address = u_regval[0];
13371 }
13372 }
13373
13374 switch (op1)
13375 {
13376 /* Store byte instructions. */
13377 case 4:
13378 case 0:
13379 record_buf_mem[0] = 1;
13380 break;
13381 /* Store half word instructions. */
13382 case 1:
13383 case 5:
13384 record_buf_mem[0] = 2;
13385 break;
13386 /* Store word instructions. */
13387 case 2:
13388 case 6:
13389 record_buf_mem[0] = 4;
13390 break;
13391
13392 default:
13393 gdb_assert_not_reached ("no decoding pattern found");
13394 break;
13395 }
13396
13397 record_buf_mem[1] = address;
13398 thumb2_insn_r->mem_rec_count = 1;
13399 record_buf[0] = reg_rn;
13400 thumb2_insn_r->reg_rec_count = 1;
13401
13402 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13403 record_buf);
13404 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13405 record_buf_mem);
13406 return ARM_RECORD_SUCCESS;
13407 }
13408
13409 /* Handler for thumb2 load memory hints instructions. */
13410
13411 static int
13412 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
13413 {
13414 uint32_t record_buf[8];
13415 uint32_t reg_rt, reg_rn;
13416
13417 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
13418 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13419
13420 if (ARM_PC_REGNUM != reg_rt)
13421 {
13422 record_buf[0] = reg_rt;
13423 record_buf[1] = reg_rn;
13424 record_buf[2] = ARM_PS_REGNUM;
13425 thumb2_insn_r->reg_rec_count = 3;
13426
13427 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13428 record_buf);
13429 return ARM_RECORD_SUCCESS;
13430 }
13431
13432 return ARM_RECORD_FAILURE;
13433 }
13434
13435 /* Handler for thumb2 load word instructions. */
13436
13437 static int
13438 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
13439 {
13440 uint32_t opcode1 = 0, opcode2 = 0;
13441 uint32_t record_buf[8];
13442
13443 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
13444 record_buf[1] = ARM_PS_REGNUM;
13445 thumb2_insn_r->reg_rec_count = 2;
13446
13447 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13448 record_buf);
13449 return ARM_RECORD_SUCCESS;
13450 }
13451
13452 /* Handler for thumb2 long multiply, long multiply accumulate, and
13453 divide instructions. */
13454
13455 static int
13456 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
13457 {
13458 uint32_t opcode1 = 0, opcode2 = 0;
13459 uint32_t record_buf[8];
13460 uint32_t reg_src1 = 0;
13461
13462 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
13463 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
13464
13465 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
13466 {
13467 /* Handle SMULL, UMULL, SMULAL. */
13468 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
13469 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13470 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13471 record_buf[2] = ARM_PS_REGNUM;
13472 thumb2_insn_r->reg_rec_count = 3;
13473 }
13474 else if (1 == opcode1 || 3 == opcode2)
13475 {
13476 /* Handle SDIV and UDIV. */
13477 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13478 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13479 record_buf[2] = ARM_PS_REGNUM;
13480 thumb2_insn_r->reg_rec_count = 3;
13481 }
13482 else
13483 return ARM_RECORD_FAILURE;
13484
13485 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13486 record_buf);
13487 return ARM_RECORD_SUCCESS;
13488 }
13489
13490 /* Record handler for thumb32 coprocessor instructions. */
13491
13492 static int
13493 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
13494 {
13495 if (bit (thumb2_insn_r->arm_insn, 25))
13496 return arm_record_coproc_data_proc (thumb2_insn_r);
13497 else
13498 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
13499 }
13500
/* Record handler for advance SIMD structure load/store instructions.
   For stores (L bit clear) this records the memory ranges the VSTn
   instruction will overwrite; for loads it records the destination D
   registers.  In both cases the base register is additionally recorded
   when the encoding implies writeback.  */

static int
thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;
  uint32_t l_bit, a_bit, b_bits;
  uint32_t record_buf[128], record_buf_mem[128];
  uint32_t reg_rn, reg_vd, address, f_esize, f_elem;
  uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
  uint8_t f_ebytes;

  /* L (bit 21) distinguishes load from store; A (bit 23) selects the
     multiple-element vs. single-element forms; B (bits 8-11) picks the
     VST1..VST4 / VLD1..VLD4 variant.  */
  l_bit = bit (thumb2_insn_r->arm_insn, 21);
  a_bit = bit (thumb2_insn_r->arm_insn, 23);
  b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
  /* Bit 22 (D) extends the vector register number to five bits.  */
  reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
  f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
  f_esize = 8 * f_ebytes;	/* Element size in bits.  */
  f_elem = 8 / f_ebytes;	/* Elements per 64-bit register.  */

  if (!l_bit)
    {
      /* Store: the base address comes from Rn.  */
      ULONGEST u_regval = 0;
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
      address = u_regval;

      if (!a_bit)
	{
	  /* Multiple-element forms.  record_buf_mem is filled with
	     (length, address) pairs, two slots per recorded range.  */
	  /* Handle VST1.  */
	  if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
	    {
	      /* B encodes the register count: 0b0111 -> 1, 0b1010 -> 2,
		 0b0110 -> 3, 0b0010 -> 4.  */
	      if (b_bits == 0x07)
		bf_regs = 1;
	      else if (b_bits == 0x0a)
		bf_regs = 2;
	      else if (b_bits == 0x06)
		bf_regs = 3;
	      else if (b_bits == 0x02)
		bf_regs = 4;
	      else
		bf_regs = 0;

	      for (index_r = 0; index_r < bf_regs; index_r++)
		{
		  for (index_e = 0; index_e < f_elem; index_e++)
		    {
		      record_buf_mem[index_m++] = f_ebytes;
		      record_buf_mem[index_m++] = address;
		      address = address + f_ebytes;
		      thumb2_insn_r->mem_rec_count += 1;
		    }
		}
	    }
	  /* Handle VST2.  */
	  else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
	    {
	      if (b_bits == 0x09 || b_bits == 0x08)
		bf_regs = 1;
	      else if (b_bits == 0x03)
		bf_regs = 2;
	      else
		bf_regs = 0;

	      /* Two interleaved elements are written per step.  */
	      for (index_r = 0; index_r < bf_regs; index_r++)
		for (index_e = 0; index_e < f_elem; index_e++)
		  {
		    for (loop_t = 0; loop_t < 2; loop_t++)
		      {
			record_buf_mem[index_m++] = f_ebytes;
			record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
			thumb2_insn_r->mem_rec_count += 1;
		      }
		    address = address + (2 * f_ebytes);
		  }
	    }
	  /* Handle VST3.  */
	  else if ((b_bits & 0x0e) == 0x04)
	    {
	      /* Three interleaved elements are written per step.  */
	      for (index_e = 0; index_e < f_elem; index_e++)
		{
		  for (loop_t = 0; loop_t < 3; loop_t++)
		    {
		      record_buf_mem[index_m++] = f_ebytes;
		      record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
		      thumb2_insn_r->mem_rec_count += 1;
		    }
		  address = address + (3 * f_ebytes);
		}
	    }
	  /* Handle VST4.  */
	  else if (!(b_bits & 0x0e))
	    {
	      /* Four interleaved elements are written per step.  */
	      for (index_e = 0; index_e < f_elem; index_e++)
		{
		  for (loop_t = 0; loop_t < 4; loop_t++)
		    {
		      record_buf_mem[index_m++] = f_ebytes;
		      record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
		      thumb2_insn_r->mem_rec_count += 1;
		    }
		  address = address + (4 * f_ebytes);
		}
	    }
	}
      else
	{
	  /* Single-element store forms; bits 10-11 re-encode the element
	     size for these encodings.  */
	  uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);

	  if (bft_size == 0x00)
	    f_ebytes = 1;
	  else if (bft_size == 0x01)
	    f_ebytes = 2;
	  else if (bft_size == 0x02)
	    f_ebytes = 4;
	  else
	    f_ebytes = 0;

	  /* Handle VST1.  */
	  if (!(b_bits & 0x0b) || b_bits == 0x08)
	    thumb2_insn_r->mem_rec_count = 1;
	  /* Handle VST2.  */
	  else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
	    thumb2_insn_r->mem_rec_count = 2;
	  /* Handle VST3.  */
	  else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
	    thumb2_insn_r->mem_rec_count = 3;
	  /* Handle VST4.  */
	  else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
	    thumb2_insn_r->mem_rec_count = 4;

	  for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
	    {
	      /* NOTE(review): both assignments target the same slot, so
		 the length stored first is immediately overwritten by
		 the address.  The (length, address) pair layout used by
		 the loops above suggests this should advance index_m
		 between the two stores — verify against what MEM_ALLOC
		 expects.  */
	      record_buf_mem[index_m] = f_ebytes;
	      record_buf_mem[index_m] = address + (index_m * f_ebytes);
	    }
	}
    }
  else
    {
      /* Load: record the destination D registers.  */
      if (!a_bit)
	{
	  /* Handle VLD1.  */
	  if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
	    thumb2_insn_r->reg_rec_count = 1;
	  /* Handle VLD2.  */
	  else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
	    thumb2_insn_r->reg_rec_count = 2;
	  /* Handle VLD3.  */
	  else if ((b_bits & 0x0e) == 0x04)
	    thumb2_insn_r->reg_rec_count = 3;
	  /* Handle VLD4.  */
	  else if (!(b_bits & 0x0e))
	    thumb2_insn_r->reg_rec_count = 4;
	}
      else
	{
	  /* Handle VLD1.  */
	  if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
	    thumb2_insn_r->reg_rec_count = 1;
	  /* Handle VLD2.  */
	  else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
	    thumb2_insn_r->reg_rec_count = 2;
	  /* Handle VLD3.  */
	  else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
	    thumb2_insn_r->reg_rec_count = 3;
	  /* Handle VLD4.  */
	  else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
	    thumb2_insn_r->reg_rec_count = 4;

	  /* NOTE(review): only this (a_bit set) branch fills record_buf
	     with the destination registers; the !a_bit branch above sets
	     reg_rec_count without populating record_buf — confirm that
	     is intentional.  */
	  for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
	    record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
	}
    }

  /* Rm != 15 implies base-register writeback, so also record Rn.
     NOTE(review): on the store paths index_r is a leftover loop counter
     and earlier record_buf entries may be uninitialized while
     reg_rec_count only grows by one — verify the slot REG_ALLOC ends up
     copying.  */
  if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
    {
      record_buf[index_r] = reg_rn;
      thumb2_insn_r->reg_rec_count += 1;
    }

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  return 0;
}
13689
/* Decodes thumb2 instruction type and invokes its record handler.
   Returns the chosen handler's status (ARM_RECORD_SUCCESS on success),
   or -1 when the opcode matches no known pattern.
   NOTE(review): the return type is unsigned yet the fall-through path
   returns -1; the caller compares the result against ARM_RECORD_SUCCESS
   so this works, but an int return type would be cleaner.  */

static unsigned int
thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
{
  uint32_t op, op1, op2;

  /* The three fields below follow the ARM ARM's top-level breakdown of
     32-bit Thumb encodings: op1 is the major group, op2 the sub-group,
     op a single discriminating bit.  */
  op = bit (thumb2_insn_r->arm_insn, 15);
  op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 26);

  if (op1 == 0x01)
    {
      if (!(op2 & 0x64 ))
	{
	  /* Load/store multiple instruction.  */
	  return thumb2_record_ld_st_multiple (thumb2_insn_r);
	}
      else if (!((op2 & 0x64) ^ 0x04))
	{
	  /* Load/store (dual/exclusive) and table branch instruction.  */
	  return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
	}
      else if (!((op2 & 0x20) ^ 0x20))
	{
	  /* Data-processing (shifted register).  */
	  return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
	}
      else if (op2 & 0x40)
	{
	  /* Co-processor instructions.  */
	  return thumb2_record_coproc_insn (thumb2_insn_r);
	}
    }
  else if (op1 == 0x02)
    {
      if (op)
	{
	  /* Branches and miscellaneous control instructions.  */
	  return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
	}
      else if (op2 & 0x20)
	{
	  /* Data-processing (plain binary immediate) instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else
	{
	  /* Data-processing (modified immediate).  */
	  return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
	}
    }
  else if (op1 == 0x03)
    {
      if (!(op2 & 0x71 ))
	{
	  /* Store single data item.  */
	  return thumb2_record_str_single_data (thumb2_insn_r);
	}
      else if (!((op2 & 0x71) ^ 0x10))
	{
	  /* Advanced SIMD or structure load/store instructions.  */
	  return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x01))
	{
	  /* Load byte, memory hints instruction.  */
	  return thumb2_record_ld_mem_hints (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x03))
	{
	  /* Load halfword, memory hints instruction.  */
	  return thumb2_record_ld_mem_hints (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x05))
	{
	  /* Load word instruction.  */
	  return thumb2_record_ld_word (thumb2_insn_r);
	}
      else if (!((op2 & 0x70) ^ 0x20))
	{
	  /* Data-processing (register) instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else if (!((op2 & 0x78) ^ 0x30))
	{
	  /* Multiply, multiply accumulate, abs diff instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else if (!((op2 & 0x78) ^ 0x38))
	{
	  /* Long multiply, long multiply accumulate, and divide.  */
	  return thumb2_record_lmul_lmla_div (thumb2_insn_r);
	}
      else if (op2 & 0x40)
	{
	  /* Co-processor instructions.  */
	  return thumb2_record_coproc_insn (thumb2_insn_r);
	}
    }

  /* No pattern matched.  */
  return -1;
}
13793
13794 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13795 and positive val on fauilure. */
13796
13797 static int
13798 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
13799 {
13800 gdb_byte buf[insn_size];
13801
13802 memset (&buf[0], 0, insn_size);
13803
13804 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
13805 return 1;
13806 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13807 insn_size,
13808 gdbarch_byte_order_for_code (insn_record->gdbarch));
13809 return 0;
13810 }
13811
13812 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13813
13814 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13815 dispatch it. */
13816
13817 static int
13818 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
13819 uint32_t insn_size)
13820 {
13821
13822 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13823 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13824 {
13825 arm_record_data_proc_misc_ld_str, /* 000. */
13826 arm_record_data_proc_imm, /* 001. */
13827 arm_record_ld_st_imm_offset, /* 010. */
13828 arm_record_ld_st_reg_offset, /* 011. */
13829 arm_record_ld_st_multiple, /* 100. */
13830 arm_record_b_bl, /* 101. */
13831 arm_record_asimd_vfp_coproc, /* 110. */
13832 arm_record_coproc_data_proc /* 111. */
13833 };
13834
13835 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13836 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13837 { \
13838 thumb_record_shift_add_sub, /* 000. */
13839 thumb_record_add_sub_cmp_mov, /* 001. */
13840 thumb_record_ld_st_reg_offset, /* 010. */
13841 thumb_record_ld_st_imm_offset, /* 011. */
13842 thumb_record_ld_st_stack, /* 100. */
13843 thumb_record_misc, /* 101. */
13844 thumb_record_ldm_stm_swi, /* 110. */
13845 thumb_record_branch /* 111. */
13846 };
13847
13848 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13849 uint32_t insn_id = 0;
13850
13851 if (extract_arm_insn (arm_record, insn_size))
13852 {
13853 if (record_debug)
13854 {
13855 printf_unfiltered (_("Process record: error reading memory at "
13856 "addr %s len = %d.\n"),
13857 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
13858 }
13859 return -1;
13860 }
13861 else if (ARM_RECORD == record_type)
13862 {
13863 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13864 insn_id = bits (arm_record->arm_insn, 25, 27);
13865 ret = arm_record_extension_space (arm_record);
13866 /* If this insn has fallen into extension space
13867 then we need not decode it anymore. */
13868 if (ret != -1 && !INSN_RECORDED(arm_record))
13869 {
13870 ret = arm_handle_insn[insn_id] (arm_record);
13871 }
13872 }
13873 else if (THUMB_RECORD == record_type)
13874 {
13875 /* As thumb does not have condition codes, we set negative. */
13876 arm_record->cond = -1;
13877 insn_id = bits (arm_record->arm_insn, 13, 15);
13878 ret = thumb_handle_insn[insn_id] (arm_record);
13879 }
13880 else if (THUMB2_RECORD == record_type)
13881 {
13882 /* As thumb does not have condition codes, we set negative. */
13883 arm_record->cond = -1;
13884
13885 /* Swap first half of 32bit thumb instruction with second half. */
13886 arm_record->arm_insn
13887 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13888
13889 insn_id = thumb2_record_decode_insn_handler (arm_record);
13890
13891 if (insn_id != ARM_RECORD_SUCCESS)
13892 {
13893 arm_record_unsupported_insn (arm_record);
13894 ret = -1;
13895 }
13896 }
13897 else
13898 {
13899 /* Throw assertion. */
13900 gdb_assert_not_reached ("not a valid instruction, could not decode");
13901 }
13902
13903 return ret;
13904 }
13905
13906
13907 /* Cleans up local record registers and memory allocations. */
13908
13909 static void
13910 deallocate_reg_mem (insn_decode_record *record)
13911 {
13912 xfree (record->arm_regs);
13913 xfree (record->arm_mems);
13914 }
13915
13916
13917 /* Parse the current instruction and record the values of the registers and
13918 memory that will be changed in current instruction to record_arch_list".
13919 Return -1 if something is wrong. */
13920
13921 int
13922 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13923 CORE_ADDR insn_addr)
13924 {
13925
13926 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13927 uint32_t no_of_rec = 0;
13928 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13929 ULONGEST t_bit = 0, insn_id = 0;
13930
13931 ULONGEST u_regval = 0;
13932
13933 insn_decode_record arm_record;
13934
13935 memset (&arm_record, 0, sizeof (insn_decode_record));
13936 arm_record.regcache = regcache;
13937 arm_record.this_addr = insn_addr;
13938 arm_record.gdbarch = gdbarch;
13939
13940
13941 if (record_debug > 1)
13942 {
13943 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13944 "addr = %s\n",
13945 paddress (gdbarch, arm_record.this_addr));
13946 }
13947
13948 if (extract_arm_insn (&arm_record, 2))
13949 {
13950 if (record_debug)
13951 {
13952 printf_unfiltered (_("Process record: error reading memory at "
13953 "addr %s len = %d.\n"),
13954 paddress (arm_record.gdbarch,
13955 arm_record.this_addr), 2);
13956 }
13957 return -1;
13958 }
13959
13960 /* Check the insn, whether it is thumb or arm one. */
13961
13962 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13963 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13964
13965
13966 if (!(u_regval & t_bit))
13967 {
13968 /* We are decoding arm insn. */
13969 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13970 }
13971 else
13972 {
13973 insn_id = bits (arm_record.arm_insn, 11, 15);
13974 /* is it thumb2 insn? */
13975 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13976 {
13977 ret = decode_insn (&arm_record, THUMB2_RECORD,
13978 THUMB2_INSN_SIZE_BYTES);
13979 }
13980 else
13981 {
13982 /* We are decoding thumb insn. */
13983 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13984 }
13985 }
13986
13987 if (0 == ret)
13988 {
13989 /* Record registers. */
13990 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13991 if (arm_record.arm_regs)
13992 {
13993 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13994 {
13995 if (record_full_arch_list_add_reg
13996 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13997 ret = -1;
13998 }
13999 }
14000 /* Record memories. */
14001 if (arm_record.arm_mems)
14002 {
14003 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
14004 {
14005 if (record_full_arch_list_add_mem
14006 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
14007 arm_record.arm_mems[no_of_rec].len))
14008 ret = -1;
14009 }
14010 }
14011
14012 if (record_full_arch_list_add_end ())
14013 ret = -1;
14014 }
14015
14016
14017 deallocate_reg_mem (&arm_record);
14018
14019 return ret;
14020 }
14021
This page took 0.305132 seconds and 3 git commands to generate.