34070452e1731be227b4af81011933a3643fcb5a
[deliverable/binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2014 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "regcache.h"
31 #include "reggroups.h"
32 #include "doublest.h"
33 #include "value.h"
34 #include "arch-utils.h"
35 #include "osabi.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
39 #include "objfiles.h"
40 #include "dwarf2-frame.h"
41 #include "gdbtypes.h"
42 #include "prologue-value.h"
43 #include "remote.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
46 #include "observer.h"
47
48 #include "arm-tdep.h"
49 #include "gdb/sim-arm.h"
50
51 #include "elf-bfd.h"
52 #include "coff/internal.h"
53 #include "elf/arm.h"
54
55 #include "vec.h"
56
57 #include "record.h"
58 #include "record-full.h"
59
60 #include "features/arm-with-m.c"
61 #include "features/arm-with-m-fpa-layout.c"
62 #include "features/arm-with-m-vfp-d16.c"
63 #include "features/arm-with-iwmmxt.c"
64 #include "features/arm-with-vfpv2.c"
65 #include "features/arm-with-vfpv3.c"
66 #include "features/arm-with-neon.c"
67
68 static int arm_debug;
69
70 /* Macros for setting and testing a bit in a minimal symbol that marks
71 it as Thumb function. The MSB of the minimal symbol's "info" field
72 is used for this purpose.
73
74 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
75 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
76
77 #define MSYMBOL_SET_SPECIAL(msym) \
78 MSYMBOL_TARGET_FLAG_1 (msym) = 1
79
80 #define MSYMBOL_IS_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym)
82
/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

/* One mapping symbol, recorded as a section-relative offset plus its
   type character.  arm_pc_is_thumb tests for type 't' (Thumb);
   presumably the other values follow the ARM ELF mapping-symbol
   convention ($a/$t/$d) -- TODO confirm against the symbol reader.  */
struct arm_mapping_symbol
{
  bfd_vma value;   /* Offset from the start of the containing section.  */
  char type;       /* Mapping symbol type character ('t' == Thumb).  */
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

/* Per-objfile map from BFD section index to the (sorted) vector of
   mapping symbols in that section; consulted by
   arm_find_mapping_symbol.  */
struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};
98
99 /* The list of available "set arm ..." and "show arm ..." commands. */
100 static struct cmd_list_element *setarmcmdlist = NULL;
101 static struct cmd_list_element *showarmcmdlist = NULL;
102
103 /* The type of floating-point to use. Keep this in sync with enum
104 arm_float_model, and the help string in _initialize_arm_tdep. */
105 static const char *const fp_model_strings[] =
106 {
107 "auto",
108 "softfpa",
109 "fpa",
110 "softvfp",
111 "vfp",
112 NULL
113 };
114
115 /* A variable that can be configured by the user. */
116 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
117 static const char *current_fp_model = "auto";
118
119 /* The ABI to use. Keep this in sync with arm_abi_kind. */
120 static const char *const arm_abi_strings[] =
121 {
122 "auto",
123 "APCS",
124 "AAPCS",
125 NULL
126 };
127
128 /* A variable that can be configured by the user. */
129 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
130 static const char *arm_abi_string = "auto";
131
132 /* The execution mode to assume. */
133 static const char *const arm_mode_strings[] =
134 {
135 "auto",
136 "arm",
137 "thumb",
138 NULL
139 };
140
141 static const char *arm_fallback_mode_string = "auto";
142 static const char *arm_force_mode_string = "auto";
143
144 /* Internal override of the execution mode. -1 means no override,
145 0 means override to ARM mode, 1 means override to Thumb mode.
146 The effect is the same as if arm_force_mode has been set by the
147 user (except the internal override has precedence over a user's
148 arm_force_mode override). */
149 static int arm_override_mode = -1;
150
151 /* Number of different reg name sets (options). */
152 static int num_disassembly_options;
153
154 /* The standard register names, and all the valid aliases for them. Note
155 that `fp', `sp' and `pc' are not added in this alias list, because they
156 have been added as builtin user registers in
157 std-regs.c:_initialize_frame_reg. */
158 static const struct
159 {
160 const char *name;
161 int regnum;
162 } arm_register_aliases[] = {
163 /* Basic register numbers. */
164 { "r0", 0 },
165 { "r1", 1 },
166 { "r2", 2 },
167 { "r3", 3 },
168 { "r4", 4 },
169 { "r5", 5 },
170 { "r6", 6 },
171 { "r7", 7 },
172 { "r8", 8 },
173 { "r9", 9 },
174 { "r10", 10 },
175 { "r11", 11 },
176 { "r12", 12 },
177 { "r13", 13 },
178 { "r14", 14 },
179 { "r15", 15 },
180 /* Synonyms (argument and variable registers). */
181 { "a1", 0 },
182 { "a2", 1 },
183 { "a3", 2 },
184 { "a4", 3 },
185 { "v1", 4 },
186 { "v2", 5 },
187 { "v3", 6 },
188 { "v4", 7 },
189 { "v5", 8 },
190 { "v6", 9 },
191 { "v7", 10 },
192 { "v8", 11 },
193 /* Other platform-specific names for r9. */
194 { "sb", 9 },
195 { "tr", 9 },
196 /* Special names. */
197 { "ip", 12 },
198 { "lr", 14 },
199 /* Names used by GCC (not listed in the ARM EABI). */
200 { "sl", 10 },
201 /* A special name from the older ATPCS. */
202 { "wr", 7 },
203 };
204
205 static const char *const arm_register_names[] =
206 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
207 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
208 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
209 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
210 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
211 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
212 "fps", "cpsr" }; /* 24 25 */
213
214 /* Valid register name styles. */
215 static const char **valid_disassembly_styles;
216
217 /* Disassembly style to use. Default to "std" register names. */
218 static const char *disassembly_style;
219
220 /* This is used to keep the bfd arch_info in sync with the disassembly
221 style. */
222 static void set_disassembly_style_sfunc(char *, int,
223 struct cmd_list_element *);
224 static void set_disassembly_style (void);
225
226 static void convert_from_extended (const struct floatformat *, const void *,
227 void *, int);
228 static void convert_to_extended (const struct floatformat *, void *,
229 const void *, int);
230
231 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
232 struct regcache *regcache,
233 int regnum, gdb_byte *buf);
234 static void arm_neon_quad_write (struct gdbarch *gdbarch,
235 struct regcache *regcache,
236 int regnum, const gdb_byte *buf);
237
238 static int thumb_insn_size (unsigned short inst1);
239
240 struct arm_prologue_cache
241 {
242 /* The stack pointer at the time this frame was created; i.e. the
243 caller's stack pointer when this function was called. It is used
244 to identify this frame. */
245 CORE_ADDR prev_sp;
246
247 /* The frame base for this frame is just prev_sp - frame size.
248 FRAMESIZE is the distance from the frame pointer to the
249 initial stack pointer. */
250
251 int framesize;
252
253 /* The register used to hold the frame pointer for this frame. */
254 int framereg;
255
256 /* Saved register offsets. */
257 struct trad_frame_saved_reg *saved_regs;
258 };
259
260 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
261 CORE_ADDR prologue_start,
262 CORE_ADDR prologue_end,
263 struct arm_prologue_cache *cache);
264
265 /* Architecture version for displaced stepping. This effects the behaviour of
266 certain instructions, and really should not be hard-wired. */
267
268 #define DISPLACED_STEPPING_ARCH_VERSION 5
269
270 /* Addresses for calling Thumb functions have the bit 0 set.
271 Here are some macros to test, set, or clear bit 0 of addresses. */
272 #define IS_THUMB_ADDR(addr) ((addr) & 1)
273 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
274 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
275
276 /* Set to true if the 32-bit mode is in use. */
277
278 int arm_apcs_32 = 1;
279
280 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
281
282 int
283 arm_psr_thumb_bit (struct gdbarch *gdbarch)
284 {
285 if (gdbarch_tdep (gdbarch)->is_m)
286 return XPSR_T;
287 else
288 return CPSR_T;
289 }
290
291 /* Determine if FRAME is executing in Thumb mode. */
292
293 int
294 arm_frame_is_thumb (struct frame_info *frame)
295 {
296 CORE_ADDR cpsr;
297 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
298
299 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
300 directly (from a signal frame or dummy frame) or by interpreting
301 the saved LR (from a prologue or DWARF frame). So consult it and
302 trust the unwinders. */
303 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
304
305 return (cpsr & t_bit) != 0;
306 }
307
308 /* Callback for VEC_lower_bound. */
309
310 static inline int
311 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
312 const struct arm_mapping_symbol *rhs)
313 {
314 return lhs->value < rhs->value;
315 }
316
/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      /* Mapping symbols are stored section-relative, so convert
	 MEMADDR into an offset from the section start to use as the
	 search key.  */
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
					    0 };
      unsigned int idx;

      data = objfile_data (sec->objfile, arm_objfile_data_key);
      if (data != NULL)
	{
	  /* One vector of mapping symbols per BFD section.  */
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	    {
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		    {
		      if (start)
			*start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;
		    }
		}

	      /* Fall back to the nearest preceding mapping symbol,
		 if any.  */
	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		  if (start)
		    *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;
		}
	    }
	}
    }

  /* No section, no per-objfile data, or no symbol at or before
     MEMADDR: the caller must use some other heuristic.  */
  return 0;
}
375
/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.

   The checks below form a strict precedence chain -- address bit 0,
   internal override, user force-mode, M-profile, mapping symbols,
   minimal-symbol "special" bit, user fallback mode, live CPSR -- and
   the first one that yields an answer wins.  Do not reorder them.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  struct displaced_step_closure* dsc
    = get_displaced_step_closure_by_addr(memaddr);

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* Respect internal mode override if active.  */
  if (arm_override_mode != -1)
    return arm_override_mode;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}
445
446 /* Remove useless bits from addresses in a running program. */
447 static CORE_ADDR
448 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
449 {
450 /* On M-profile devices, do not strip the low bit from EXC_RETURN
451 (the magic exception return address). */
452 if (gdbarch_tdep (gdbarch)->is_m
453 && (val & 0xfffffff0) == 0xfffffff0)
454 return val;
455
456 if (arm_apcs_32)
457 return UNMAKE_THUMB_ADDR (val);
458 else
459 return (val & 0x03fffffc);
460 }
461
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  /* Only consider a symbol that starts exactly at PC and has a
     linkage name we can inspect.  */
  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  Skip the stub's own "__" prefix so the
	 prefix checks below see foo's name (e.g. the stub for
	 __truncdfsf2 is "____truncdfsf2_from_thumb").  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
	return 1;
      if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
	return 1;
      if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  return 0;
}
515
/* Support routines for instruction parsing.  */

/* Mask covering the low (x+1) bits, e.g. submask (3) == 0xf.  */
#define submask(x) ((1L << ((x) + 1)) - 1)
/* Bit ST of OBJ, as 0 or 1.  */
#define bit(obj,st) (((obj) >> (st)) & 1)
/* Bits ST..FN (inclusive) of OBJ, right-justified.  */
#define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
/* Bits ST..FN of OBJ, sign-extended from bit FN.  */
#define sbits(obj,st,fn) \
  ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
/* Target of an ARM-mode branch: the instruction's address plus 8 (PC
   read-ahead) plus the sign-extended 24-bit offset scaled by 4.  */
#define BranchDest(addr,instr) \
  ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))

/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
539
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.
   IMM is the 12-bit modified immediate field i:imm3:imm8.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int rotation = imm >> 7;	/* imm<11:7>.  */
  unsigned int byte = imm & 0xff;	/* imm<7:0>.  */

  if (rotation >= 8)
    /* Rotated form: '1':imm<6:0> rotated right by imm<11:7>.  Since
       the unrotated value fits in eight bits and the rotation is at
       least eight, the rotate reduces to a plain left shift.  */
    return (0x80 | (imm & 0x7f)) << (32 - rotation);

  /* Replicated forms, selected by imm<9:8> (imm<11:10> are zero when
     ROTATION < 8).  */
  switch ((imm >> 8) & 3)
    {
    case 0:
      return byte;
    case 1:
      return byte | (byte << 16);
    case 2:
      return (byte << 8) | (byte << 24);
    default:
      return byte | (byte << 8) | (byte << 16) | (byte << 24);
    }
}
563
/* Return 1 if the 16-bit Thumb instruction INST might change
   control flow, 0 otherwise.  */

static int
thumb_instruction_changes_pc (unsigned short inst)
{
  return ((inst & 0xff00) == 0xbd00	/* pop {rlist, pc} */
	  || (inst & 0xf000) == 0xd000	/* conditional branch */
	  || (inst & 0xf800) == 0xe000	/* unconditional branch */
	  || (inst & 0xff00) == 0x4700	/* bx REG, blx REG */
	  || (inst & 0xff87) == 0x4687	/* mov pc, REG */
	  || (inst & 0xf500) == 0xb100);	/* CBNZ or CBZ.  */
}
590
/* Return 1 if the 32-bit Thumb instruction encoded by OP1 (first
   halfword) and OP2 (second halfword) might change control flow,
   0 otherwise.  */

static int
thumb2_instruction_changes_pc (unsigned short op1, unsigned short op2)
{
  if ((op1 & 0xf800) == 0xf000 && (op2 & 0x8000) == 0x8000)
    {
      /* Branches and miscellaneous control instructions.  */

      if ((op2 & 0x1000) != 0 || (op2 & 0xd001) == 0xc000)
	return 1;		/* B, BL, BLX.  */

      /* SUBS PC, LR, #imm8.  NOTE(review): this test looks
	 unreachable -- (OP2 & 0xff00) == 0x3f00 cannot hold when bit
	 15 of OP2 is set as the enclosing guard requires -- kept
	 as-is to preserve behavior; confirm against upstream.  */
      if (op1 == 0xf3de && (op2 & 0xff00) == 0x3f00)
	return 1;

      if ((op2 & 0xd000) == 0x8000 && (op1 & 0x0380) != 0x0380)
	return 1;		/* Conditional branch.  */

      return 0;
    }

  if ((op1 & 0xfe50) == 0xe810)
    {
      /* Load multiple or RFE.  */
      int b7 = (op1 >> 7) & 1;
      int b8 = (op1 >> 8) & 1;

      /* RFEIA (b7 && b8) and RFEDB (!b7 && !b8) always write the PC;
	 LDMIA/POP (b7) and LDMDB (b8) do so only when PC is in the
	 register list, i.e. bit 15 of OP2.  */
      if (b7 == b8)
	return 1;
      return (op2 >> 15) & 1;
    }

  if ((op1 & 0xffef) == 0xea4f && (op2 & 0xfff0) == 0x0f00)
    return 1;			/* MOV PC or MOVS PC.  */

  if ((op1 & 0xff70) == 0xf850 && (op2 & 0xf000) == 0xf000)
    {
      /* LDR with PC (bits 15:12 of OP2) as the destination.  */
      if ((op1 & 0x000f) == 15)	/* Rn == PC (literal load).  */
	return 1;
      if (op1 & 0x0080)
	return 1;
      if (op2 & 0x0800)
	return 1;
      if ((op2 & 0x0fc0) == 0x0000)
	return 1;

      return 0;
    }

  if ((op1 & 0xfff0) == 0xe8d0 && (op2 & 0xfff0) == 0xf000)
    return 1;			/* TBB.  */

  if ((op1 & 0xfff0) == 0xe8d0 && (op2 & 0xfff0) == 0xf010)
    return 1;			/* TBH.  */

  return 0;
}
685
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  if (insn == 0x46bd)			/* mov sp, r7 */
    return 1;
  if ((insn & 0xff80) == 0xb000)	/* add sp, imm */
    return 1;

  return (insn & 0xfe00) == 0xbc00;	/* pop <registers> */
}
696
697 /* Analyze a Thumb prologue, looking for a recognizable stack frame
698 and frame pointer. Scan until we encounter a store that could
699 clobber the stack frame unexpectedly, or an unknown instruction.
700 Return the last address which is definitely safe to skip for an
701 initial breakpoint. */
702
703 static CORE_ADDR
704 thumb_analyze_prologue (struct gdbarch *gdbarch,
705 CORE_ADDR start, CORE_ADDR limit,
706 struct arm_prologue_cache *cache)
707 {
708 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
709 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
710 int i;
711 pv_t regs[16];
712 struct pv_area *stack;
713 struct cleanup *back_to;
714 CORE_ADDR offset;
715 CORE_ADDR unrecognized_pc = 0;
716
717 for (i = 0; i < 16; i++)
718 regs[i] = pv_register (i, 0);
719 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
720 back_to = make_cleanup_free_pv_area (stack);
721
722 while (start < limit)
723 {
724 unsigned short insn;
725
726 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
727
728 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
729 {
730 int regno;
731 int mask;
732
733 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
734 break;
735
736 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
737 whether to save LR (R14). */
738 mask = (insn & 0xff) | ((insn & 0x100) << 6);
739
740 /* Calculate offsets of saved R0-R7 and LR. */
741 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
742 if (mask & (1 << regno))
743 {
744 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
745 -4);
746 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
747 }
748 }
749 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
750 {
751 offset = (insn & 0x7f) << 2; /* get scaled offset */
752 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
753 -offset);
754 }
755 else if (thumb_instruction_restores_sp (insn))
756 {
757 /* Don't scan past the epilogue. */
758 break;
759 }
760 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
761 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
762 (insn & 0xff) << 2);
763 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
764 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
765 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
766 bits (insn, 6, 8));
767 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
768 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
769 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
770 bits (insn, 0, 7));
771 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
772 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
773 && pv_is_constant (regs[bits (insn, 3, 5)]))
774 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
775 regs[bits (insn, 6, 8)]);
776 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
777 && pv_is_constant (regs[bits (insn, 3, 6)]))
778 {
779 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
780 int rm = bits (insn, 3, 6);
781 regs[rd] = pv_add (regs[rd], regs[rm]);
782 }
783 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
784 {
785 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
786 int src_reg = (insn & 0x78) >> 3;
787 regs[dst_reg] = regs[src_reg];
788 }
789 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
790 {
791 /* Handle stores to the stack. Normally pushes are used,
792 but with GCC -mtpcs-frame, there may be other stores
793 in the prologue to create the frame. */
794 int regno = (insn >> 8) & 0x7;
795 pv_t addr;
796
797 offset = (insn & 0xff) << 2;
798 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
799
800 if (pv_area_store_would_trash (stack, addr))
801 break;
802
803 pv_area_store (stack, addr, 4, regs[regno]);
804 }
805 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
806 {
807 int rd = bits (insn, 0, 2);
808 int rn = bits (insn, 3, 5);
809 pv_t addr;
810
811 offset = bits (insn, 6, 10) << 2;
812 addr = pv_add_constant (regs[rn], offset);
813
814 if (pv_area_store_would_trash (stack, addr))
815 break;
816
817 pv_area_store (stack, addr, 4, regs[rd]);
818 }
819 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
820 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
821 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
822 /* Ignore stores of argument registers to the stack. */
823 ;
824 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
825 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
826 /* Ignore block loads from the stack, potentially copying
827 parameters from memory. */
828 ;
829 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
830 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
831 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
832 /* Similarly ignore single loads from the stack. */
833 ;
834 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
835 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
836 /* Skip register copies, i.e. saves to another register
837 instead of the stack. */
838 ;
839 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
840 /* Recognize constant loads; even with small stacks these are necessary
841 on Thumb. */
842 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
843 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
844 {
845 /* Constant pool loads, for the same reason. */
846 unsigned int constant;
847 CORE_ADDR loc;
848
849 loc = start + 4 + bits (insn, 0, 7) * 4;
850 constant = read_memory_unsigned_integer (loc, 4, byte_order);
851 regs[bits (insn, 8, 10)] = pv_constant (constant);
852 }
853 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
854 {
855 unsigned short inst2;
856
857 inst2 = read_memory_unsigned_integer (start + 2, 2,
858 byte_order_for_code);
859
860 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
861 {
862 /* BL, BLX. Allow some special function calls when
863 skipping the prologue; GCC generates these before
864 storing arguments to the stack. */
865 CORE_ADDR nextpc;
866 int j1, j2, imm1, imm2;
867
868 imm1 = sbits (insn, 0, 10);
869 imm2 = bits (inst2, 0, 10);
870 j1 = bit (inst2, 13);
871 j2 = bit (inst2, 11);
872
873 offset = ((imm1 << 12) + (imm2 << 1));
874 offset ^= ((!j2) << 22) | ((!j1) << 23);
875
876 nextpc = start + 4 + offset;
877 /* For BLX make sure to clear the low bits. */
878 if (bit (inst2, 12) == 0)
879 nextpc = nextpc & 0xfffffffc;
880
881 if (!skip_prologue_function (gdbarch, nextpc,
882 bit (inst2, 12) != 0))
883 break;
884 }
885
886 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
887 { registers } */
888 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
889 {
890 pv_t addr = regs[bits (insn, 0, 3)];
891 int regno;
892
893 if (pv_area_store_would_trash (stack, addr))
894 break;
895
896 /* Calculate offsets of saved registers. */
897 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
898 if (inst2 & (1 << regno))
899 {
900 addr = pv_add_constant (addr, -4);
901 pv_area_store (stack, addr, 4, regs[regno]);
902 }
903
904 if (insn & 0x0020)
905 regs[bits (insn, 0, 3)] = addr;
906 }
907
908 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
909 [Rn, #+/-imm]{!} */
910 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
911 {
912 int regno1 = bits (inst2, 12, 15);
913 int regno2 = bits (inst2, 8, 11);
914 pv_t addr = regs[bits (insn, 0, 3)];
915
916 offset = inst2 & 0xff;
917 if (insn & 0x0080)
918 addr = pv_add_constant (addr, offset);
919 else
920 addr = pv_add_constant (addr, -offset);
921
922 if (pv_area_store_would_trash (stack, addr))
923 break;
924
925 pv_area_store (stack, addr, 4, regs[regno1]);
926 pv_area_store (stack, pv_add_constant (addr, 4),
927 4, regs[regno2]);
928
929 if (insn & 0x0020)
930 regs[bits (insn, 0, 3)] = addr;
931 }
932
933 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
934 && (inst2 & 0x0c00) == 0x0c00
935 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
936 {
937 int regno = bits (inst2, 12, 15);
938 pv_t addr = regs[bits (insn, 0, 3)];
939
940 offset = inst2 & 0xff;
941 if (inst2 & 0x0200)
942 addr = pv_add_constant (addr, offset);
943 else
944 addr = pv_add_constant (addr, -offset);
945
946 if (pv_area_store_would_trash (stack, addr))
947 break;
948
949 pv_area_store (stack, addr, 4, regs[regno]);
950
951 if (inst2 & 0x0100)
952 regs[bits (insn, 0, 3)] = addr;
953 }
954
955 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
956 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
957 {
958 int regno = bits (inst2, 12, 15);
959 pv_t addr;
960
961 offset = inst2 & 0xfff;
962 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
963
964 if (pv_area_store_would_trash (stack, addr))
965 break;
966
967 pv_area_store (stack, addr, 4, regs[regno]);
968 }
969
970 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
971 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
972 /* Ignore stores of argument registers to the stack. */
973 ;
974
975 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
976 && (inst2 & 0x0d00) == 0x0c00
977 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
978 /* Ignore stores of argument registers to the stack. */
979 ;
980
981 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
982 { registers } */
983 && (inst2 & 0x8000) == 0x0000
984 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
985 /* Ignore block loads from the stack, potentially copying
986 parameters from memory. */
987 ;
988
989 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
990 [Rn, #+/-imm] */
991 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
992 /* Similarly ignore dual loads from the stack. */
993 ;
994
995 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
996 && (inst2 & 0x0d00) == 0x0c00
997 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
998 /* Similarly ignore single loads from the stack. */
999 ;
1000
1001 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1002 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1003 /* Similarly ignore single loads from the stack. */
1004 ;
1005
1006 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1007 && (inst2 & 0x8000) == 0x0000)
1008 {
1009 unsigned int imm = ((bits (insn, 10, 10) << 11)
1010 | (bits (inst2, 12, 14) << 8)
1011 | bits (inst2, 0, 7));
1012
1013 regs[bits (inst2, 8, 11)]
1014 = pv_add_constant (regs[bits (insn, 0, 3)],
1015 thumb_expand_immediate (imm));
1016 }
1017
1018 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1019 && (inst2 & 0x8000) == 0x0000)
1020 {
1021 unsigned int imm = ((bits (insn, 10, 10) << 11)
1022 | (bits (inst2, 12, 14) << 8)
1023 | bits (inst2, 0, 7));
1024
1025 regs[bits (inst2, 8, 11)]
1026 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1027 }
1028
1029 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1030 && (inst2 & 0x8000) == 0x0000)
1031 {
1032 unsigned int imm = ((bits (insn, 10, 10) << 11)
1033 | (bits (inst2, 12, 14) << 8)
1034 | bits (inst2, 0, 7));
1035
1036 regs[bits (inst2, 8, 11)]
1037 = pv_add_constant (regs[bits (insn, 0, 3)],
1038 - (CORE_ADDR) thumb_expand_immediate (imm));
1039 }
1040
1041 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1042 && (inst2 & 0x8000) == 0x0000)
1043 {
1044 unsigned int imm = ((bits (insn, 10, 10) << 11)
1045 | (bits (inst2, 12, 14) << 8)
1046 | bits (inst2, 0, 7));
1047
1048 regs[bits (inst2, 8, 11)]
1049 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1050 }
1051
1052 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1053 {
1054 unsigned int imm = ((bits (insn, 10, 10) << 11)
1055 | (bits (inst2, 12, 14) << 8)
1056 | bits (inst2, 0, 7));
1057
1058 regs[bits (inst2, 8, 11)]
1059 = pv_constant (thumb_expand_immediate (imm));
1060 }
1061
1062 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1063 {
1064 unsigned int imm
1065 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1066
1067 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1068 }
1069
1070 else if (insn == 0xea5f /* mov.w Rd,Rm */
1071 && (inst2 & 0xf0f0) == 0)
1072 {
1073 int dst_reg = (inst2 & 0x0f00) >> 8;
1074 int src_reg = inst2 & 0xf;
1075 regs[dst_reg] = regs[src_reg];
1076 }
1077
1078 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1079 {
1080 /* Constant pool loads. */
1081 unsigned int constant;
1082 CORE_ADDR loc;
1083
1084 offset = bits (inst2, 0, 11);
1085 if (insn & 0x0080)
1086 loc = start + 4 + offset;
1087 else
1088 loc = start + 4 - offset;
1089
1090 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1091 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1092 }
1093
1094 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1095 {
1096 /* Constant pool loads. */
1097 unsigned int constant;
1098 CORE_ADDR loc;
1099
1100 offset = bits (inst2, 0, 7) << 2;
1101 if (insn & 0x0080)
1102 loc = start + 4 + offset;
1103 else
1104 loc = start + 4 - offset;
1105
1106 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1107 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1108
1109 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1110 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1111 }
1112
1113 else if (thumb2_instruction_changes_pc (insn, inst2))
1114 {
1115 /* Don't scan past anything that might change control flow. */
1116 break;
1117 }
1118 else
1119 {
1120 /* The optimizer might shove anything into the prologue,
1121 so we just skip what we don't recognize. */
1122 unrecognized_pc = start;
1123 }
1124
1125 start += 2;
1126 }
1127 else if (thumb_instruction_changes_pc (insn))
1128 {
1129 /* Don't scan past anything that might change control flow. */
1130 break;
1131 }
1132 else
1133 {
1134 /* The optimizer might shove anything into the prologue,
1135 so we just skip what we don't recognize. */
1136 unrecognized_pc = start;
1137 }
1138
1139 start += 2;
1140 }
1141
1142 if (arm_debug)
1143 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1144 paddress (gdbarch, start));
1145
1146 if (unrecognized_pc == 0)
1147 unrecognized_pc = start;
1148
1149 if (cache == NULL)
1150 {
1151 do_cleanups (back_to);
1152 return unrecognized_pc;
1153 }
1154
1155 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1156 {
1157 /* Frame pointer is fp. Frame size is constant. */
1158 cache->framereg = ARM_FP_REGNUM;
1159 cache->framesize = -regs[ARM_FP_REGNUM].k;
1160 }
1161 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1162 {
1163 /* Frame pointer is r7. Frame size is constant. */
1164 cache->framereg = THUMB_FP_REGNUM;
1165 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1166 }
1167 else
1168 {
1169 /* Try the stack pointer... this is a bit desperate. */
1170 cache->framereg = ARM_SP_REGNUM;
1171 cache->framesize = -regs[ARM_SP_REGNUM].k;
1172 }
1173
1174 for (i = 0; i < 16; i++)
1175 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1176 cache->saved_regs[i].addr = offset;
1177
1178 do_cleanups (back_to);
1179 return unrecognized_pc;
1180 }
1181
1182
1183 /* Try to analyze the instructions starting from PC, which load symbol
1184 __stack_chk_guard. Return the address of instruction after loading this
1185 symbol, set the dest register number to *BASEREG, and set the size of
1186 instructions for loading symbol in OFFSET. Return 0 if instructions are
1187 not recognized. */
1188
1189 static CORE_ADDR
1190 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1191 unsigned int *destreg, int *offset)
1192 {
1193 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1194 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1195 unsigned int low, high, address;
1196
1197 address = 0;
1198 if (is_thumb)
1199 {
1200 unsigned short insn1
1201 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1202
1203 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1204 {
1205 *destreg = bits (insn1, 8, 10);
1206 *offset = 2;
1207 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1208 address = read_memory_unsigned_integer (address, 4,
1209 byte_order_for_code);
1210 }
1211 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1212 {
1213 unsigned short insn2
1214 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1215
1216 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1217
1218 insn1
1219 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1220 insn2
1221 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1222
1223 /* movt Rd, #const */
1224 if ((insn1 & 0xfbc0) == 0xf2c0)
1225 {
1226 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1227 *destreg = bits (insn2, 8, 11);
1228 *offset = 8;
1229 address = (high << 16 | low);
1230 }
1231 }
1232 }
1233 else
1234 {
1235 unsigned int insn
1236 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1237
1238 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1239 {
1240 address = bits (insn, 0, 11) + pc + 8;
1241 address = read_memory_unsigned_integer (address, 4,
1242 byte_order_for_code);
1243
1244 *destreg = bits (insn, 12, 15);
1245 *offset = 4;
1246 }
1247 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1248 {
1249 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1250
1251 insn
1252 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1253
1254 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1255 {
1256 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1257 *destreg = bits (insn, 12, 15);
1258 *offset = 8;
1259 address = (high << 16 | low);
1260 }
1261 }
1262 }
1263
1264 return address;
1265 }
1266
1267 /* Try to skip a sequence of instructions used for stack protector. If PC
1268 points to the first instruction of this sequence, return the address of
1269 first instruction after this sequence, otherwise, return original PC.
1270
1271 On arm, this sequence of instructions is composed of mainly three steps,
1272 Step 1: load symbol __stack_chk_guard,
1273 Step 2: load from address of __stack_chk_guard,
1274 Step 3: store it to somewhere else.
1275
1276 Usually, instructions on step 2 and step 3 are the same on various ARM
1277 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1278 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1279 instructions in step 1 vary from different ARM architectures. On ARMv7,
1280 they are,
1281
1282 movw Rn, #:lower16:__stack_chk_guard
1283 movt Rn, #:upper16:__stack_chk_guard
1284
1285 On ARMv5t, it is,
1286
1287 ldr Rn, .Label
1288 ....
   .Label:
1290 .word __stack_chk_guard
1291
1292 Since ldr/str is a very popular instruction, we can't use them as
1293 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1294 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */
1296
1297 static CORE_ADDR
1298 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1299 {
1300 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1301 unsigned int basereg;
1302 struct bound_minimal_symbol stack_chk_guard;
1303 int offset;
1304 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1305 CORE_ADDR addr;
1306
1307 /* Try to parse the instructions in Step 1. */
1308 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1309 &basereg, &offset);
1310 if (!addr)
1311 return pc;
1312
1313 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1314 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1315 Otherwise, this sequence cannot be for stack protector. */
1316 if (stack_chk_guard.minsym == NULL
1317 || strncmp (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
1318 "__stack_chk_guard",
1319 strlen ("__stack_chk_guard")) != 0)
1320 return pc;
1321
1322 if (is_thumb)
1323 {
1324 unsigned int destreg;
1325 unsigned short insn
1326 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1327
1328 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1329 if ((insn & 0xf800) != 0x6800)
1330 return pc;
1331 if (bits (insn, 3, 5) != basereg)
1332 return pc;
1333 destreg = bits (insn, 0, 2);
1334
1335 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1336 byte_order_for_code);
1337 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1338 if ((insn & 0xf800) != 0x6000)
1339 return pc;
1340 if (destreg != bits (insn, 0, 2))
1341 return pc;
1342 }
1343 else
1344 {
1345 unsigned int destreg;
1346 unsigned int insn
1347 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1348
1349 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1350 if ((insn & 0x0e500000) != 0x04100000)
1351 return pc;
1352 if (bits (insn, 16, 19) != basereg)
1353 return pc;
1354 destreg = bits (insn, 12, 15);
1355 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1356 insn = read_memory_unsigned_integer (pc + offset + 4,
1357 4, byte_order_for_code);
1358 if ((insn & 0x0e500000) != 0x04000000)
1359 return pc;
1360 if (bits (insn, 12, 15) != destreg)
1361 return pc;
1362 }
1363 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1364 on arm. */
1365 if (is_thumb)
1366 return pc + offset + 4;
1367 else
1368 return pc + offset + 8;
1369 }
1370
/* Advance the PC across any function entry prologue instructions to
   reach some "real" code.

   The APCS (ARM Procedure Call Standard) defines the following
   prologue:

   mov ip, sp
   [stmfd sp!, {a1,a2,a3,a4}]
   stmfd sp!, {...,fp,ip,lr,pc}
   [stfe f7, [sp, #-12]!]
   [stfe f6, [sp, #-12]!]
   [stfe f5, [sp, #-12]!]
   [stfe f4, [sp, #-12]!]
   sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn.  */

static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long inst;
  CORE_ADDR skip_pc;
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      /* If the function begins with a stack-protector sequence, skip
	 past it as well.  */
      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);


      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	  && (cust == NULL
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || strncmp (COMPUNIT_PRODUCER (cust), "GNU ",
			  sizeof ("GNU ") - 1) == 0
	      || strncmp (COMPUNIT_PRODUCER (cust), "clang ",
			  sizeof ("clang ") - 1) == 0))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  /* If the analyzer stopped short of the SAL-derived end, the
	     "prologue" contains something we do not recognize; fall
	     back to the function start rather than skip real code.  */
	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;          /* Magic.  */


  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);

  /* ARM mode: step over each instruction we recognize as belonging to
     a prologue; stop at the first one we do not.  */
  for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
    {
      inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);

      /* "mov ip, sp" is no longer a required part of the prologue.  */
      if (inst == 0xe1a0c00d)			/* mov ip, sp */
	continue;

      if ((inst & 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
	continue;

      if ((inst & 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
	continue;

      /* Some prologues begin with "str lr, [sp, #-4]!".  */
      if (inst == 0xe52de004)			/* str lr, [sp, #-4]! */
	continue;

      if ((inst & 0xfffffff0) == 0xe92d0000)	/* stmfd sp!,{a1,a2,a3,a4} */
	continue;

      if ((inst & 0xfffff800) == 0xe92dd800)	/* stmfd sp!,{fp,ip,lr,pc} */
	continue;

      /* Any insns after this point may float into the code, if it makes
	 for better instruction scheduling, so we skip them only if we
	 find them, but still consider the function to be frame-ful.  */

      /* We may have either one sfmfd instruction here, or several stfe
	 insns, depending on the version of floating point code we
	 support.  */
      if ((inst & 0xffbf0fff) == 0xec2d0200)	/* sfmfd fn, <cnt>, [sp]! */
	continue;

      if ((inst & 0xffff8fff) == 0xed6d0103)	/* stfe fn, [sp, #-12]! */
	continue;

      if ((inst & 0xfffff000) == 0xe24cb000)	/* sub fp, ip, #nn */
	continue;

      if ((inst & 0xfffff000) == 0xe24dd000)	/* sub sp, sp, #nn */
	continue;

      if ((inst & 0xffffc000) == 0xe54b0000	/* strb r(0123),[r11,#-nn] */
	  || (inst & 0xffffc0f0) == 0xe14b00b0	/* strh r(0123),[r11,#-nn] */
	  || (inst & 0xffffc000) == 0xe50b0000)	/* str r(0123),[r11,#-nn] */
	continue;

      if ((inst & 0xffffc000) == 0xe5cd0000	/* strb r(0123),[sp,#nn] */
	  || (inst & 0xffffc0f0) == 0xe1cd00b0	/* strh r(0123),[sp,#nn] */
	  || (inst & 0xffffc000) == 0xe58d0000)	/* str r(0123),[sp,#nn] */
	continue;

      /* Un-recognized instruction; stop scanning.  */
      break;
    }

  return skip_pc;		/* End of prologue.  */
}
1525
1526 /* *INDENT-OFF* */
1527 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1528 This function decodes a Thumb function prologue to determine:
1529 1) the size of the stack frame
1530 2) which registers are saved on it
1531 3) the offsets of saved regs
1532 4) the offset from the stack pointer to the frame pointer
1533
1534 A typical Thumb function prologue would create this stack frame
1535 (offsets relative to FP)
1536 old SP -> 24 stack parameters
1537 20 LR
1538 16 R7
1539 R7 -> 0 local variables (16 bytes)
1540 SP -> -12 additional stack space (12 bytes)
1541 The frame size would thus be 36 bytes, and the frame offset would be
1542 12 bytes. The frame register is R7.
1543
1544 The comments for thumb_skip_prolog() describe the algorithm we use
1545 to detect the end of the prolog. */
1546 /* *INDENT-ON* */
1547
1548 static void
1549 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1550 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1551 {
1552 CORE_ADDR prologue_start;
1553 CORE_ADDR prologue_end;
1554
1555 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1556 &prologue_end))
1557 {
1558 /* See comment in arm_scan_prologue for an explanation of
1559 this heuristics. */
1560 if (prologue_end > prologue_start + 64)
1561 {
1562 prologue_end = prologue_start + 64;
1563 }
1564 }
1565 else
1566 /* We're in the boondocks: we have no idea where the start of the
1567 function is. */
1568 return;
1569
1570 prologue_end = min (prologue_end, prev_pc);
1571
1572 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1573 }
1574
1575 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1576
1577 static int
1578 arm_instruction_changes_pc (uint32_t this_instr)
1579 {
1580 if (bits (this_instr, 28, 31) == INST_NV)
1581 /* Unconditional instructions. */
1582 switch (bits (this_instr, 24, 27))
1583 {
1584 case 0xa:
1585 case 0xb:
1586 /* Branch with Link and change to Thumb. */
1587 return 1;
1588 case 0xc:
1589 case 0xd:
1590 case 0xe:
1591 /* Coprocessor register transfer. */
1592 if (bits (this_instr, 12, 15) == 15)
1593 error (_("Invalid update to pc in instruction"));
1594 return 0;
1595 default:
1596 return 0;
1597 }
1598 else
1599 switch (bits (this_instr, 25, 27))
1600 {
1601 case 0x0:
1602 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1603 {
1604 /* Multiplies and extra load/stores. */
1605 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1606 /* Neither multiplies nor extension load/stores are allowed
1607 to modify PC. */
1608 return 0;
1609
1610 /* Otherwise, miscellaneous instructions. */
1611
1612 /* BX <reg>, BXJ <reg>, BLX <reg> */
1613 if (bits (this_instr, 4, 27) == 0x12fff1
1614 || bits (this_instr, 4, 27) == 0x12fff2
1615 || bits (this_instr, 4, 27) == 0x12fff3)
1616 return 1;
1617
1618 /* Other miscellaneous instructions are unpredictable if they
1619 modify PC. */
1620 return 0;
1621 }
1622 /* Data processing instruction. Fall through. */
1623
1624 case 0x1:
1625 if (bits (this_instr, 12, 15) == 15)
1626 return 1;
1627 else
1628 return 0;
1629
1630 case 0x2:
1631 case 0x3:
1632 /* Media instructions and architecturally undefined instructions. */
1633 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1634 return 0;
1635
1636 /* Stores. */
1637 if (bit (this_instr, 20) == 0)
1638 return 0;
1639
1640 /* Loads. */
1641 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1642 return 1;
1643 else
1644 return 0;
1645
1646 case 0x4:
1647 /* Load/store multiple. */
1648 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1649 return 1;
1650 else
1651 return 0;
1652
1653 case 0x5:
1654 /* Branch and branch with link. */
1655 return 1;
1656
1657 case 0x6:
1658 case 0x7:
1659 /* Coprocessor transfers or SWIs can not affect PC. */
1660 return 0;
1661
1662 default:
1663 internal_error (__FILE__, __LINE__, _("bad value in switch"));
1664 }
1665 }
1666
1667 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1668 otherwise. */
1669
1670 static int
1671 arm_instruction_restores_sp (unsigned int insn)
1672 {
1673 if (bits (insn, 28, 31) != INST_NV)
1674 {
1675 if ((insn & 0x0df0f000) == 0x0080d000
1676 /* ADD SP (register or immediate). */
1677 || (insn & 0x0df0f000) == 0x0040d000
1678 /* SUB SP (register or immediate). */
1679 || (insn & 0x0ffffff0) == 0x01a0d000
1680 /* MOV SP. */
1681 || (insn & 0x0fff0000) == 0x08bd0000
1682 /* POP (LDMIA). */
1683 || (insn & 0x0fff0000) == 0x049d0000)
1684 /* POP of a single register. */
1685 return 1;
1686 }
1687
1688 return 0;
1689 }
1690
1691 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1692 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1693 fill it in. Return the first address not recognized as a prologue
1694 instruction.
1695
1696 We recognize all the instructions typically found in ARM prologues,
1697 plus harmless instructions which can be skipped (either for analysis
1698 purposes, or a more restrictive set that can be skipped when finding
1699 the end of the prologue). */
1700
1701 static CORE_ADDR
1702 arm_analyze_prologue (struct gdbarch *gdbarch,
1703 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1704 struct arm_prologue_cache *cache)
1705 {
1706 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1707 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1708 int regno;
1709 CORE_ADDR offset, current_pc;
1710 pv_t regs[ARM_FPS_REGNUM];
1711 struct pv_area *stack;
1712 struct cleanup *back_to;
1713 CORE_ADDR unrecognized_pc = 0;
1714
1715 /* Search the prologue looking for instructions that set up the
1716 frame pointer, adjust the stack pointer, and save registers.
1717
1718 Be careful, however, and if it doesn't look like a prologue,
1719 don't try to scan it. If, for instance, a frameless function
1720 begins with stmfd sp!, then we will tell ourselves there is
1721 a frame, which will confuse stack traceback, as well as "finish"
1722 and other operations that rely on a knowledge of the stack
1723 traceback. */
1724
1725 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1726 regs[regno] = pv_register (regno, 0);
1727 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1728 back_to = make_cleanup_free_pv_area (stack);
1729
1730 for (current_pc = prologue_start;
1731 current_pc < prologue_end;
1732 current_pc += 4)
1733 {
1734 unsigned int insn
1735 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1736
1737 if (insn == 0xe1a0c00d) /* mov ip, sp */
1738 {
1739 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1740 continue;
1741 }
1742 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1743 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1744 {
1745 unsigned imm = insn & 0xff; /* immediate value */
1746 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1747 int rd = bits (insn, 12, 15);
1748 imm = (imm >> rot) | (imm << (32 - rot));
1749 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1750 continue;
1751 }
1752 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1753 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1754 {
1755 unsigned imm = insn & 0xff; /* immediate value */
1756 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1757 int rd = bits (insn, 12, 15);
1758 imm = (imm >> rot) | (imm << (32 - rot));
1759 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1760 continue;
1761 }
1762 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1763 [sp, #-4]! */
1764 {
1765 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1766 break;
1767 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1768 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1769 regs[bits (insn, 12, 15)]);
1770 continue;
1771 }
1772 else if ((insn & 0xffff0000) == 0xe92d0000)
1773 /* stmfd sp!, {..., fp, ip, lr, pc}
1774 or
1775 stmfd sp!, {a1, a2, a3, a4} */
1776 {
1777 int mask = insn & 0xffff;
1778
1779 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1780 break;
1781
1782 /* Calculate offsets of saved registers. */
1783 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1784 if (mask & (1 << regno))
1785 {
1786 regs[ARM_SP_REGNUM]
1787 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1788 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1789 }
1790 }
1791 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1792 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1793 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1794 {
1795 /* No need to add this to saved_regs -- it's just an arg reg. */
1796 continue;
1797 }
1798 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1799 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1800 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1801 {
1802 /* No need to add this to saved_regs -- it's just an arg reg. */
1803 continue;
1804 }
1805 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1806 { registers } */
1807 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1808 {
1809 /* No need to add this to saved_regs -- it's just arg regs. */
1810 continue;
1811 }
1812 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1813 {
1814 unsigned imm = insn & 0xff; /* immediate value */
1815 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1816 imm = (imm >> rot) | (imm << (32 - rot));
1817 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1818 }
1819 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1820 {
1821 unsigned imm = insn & 0xff; /* immediate value */
1822 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1823 imm = (imm >> rot) | (imm << (32 - rot));
1824 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1825 }
1826 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1827 [sp, -#c]! */
1828 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1829 {
1830 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1831 break;
1832
1833 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1834 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1835 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1836 }
1837 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1838 [sp!] */
1839 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1840 {
1841 int n_saved_fp_regs;
1842 unsigned int fp_start_reg, fp_bound_reg;
1843
1844 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1845 break;
1846
1847 if ((insn & 0x800) == 0x800) /* N0 is set */
1848 {
1849 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1850 n_saved_fp_regs = 3;
1851 else
1852 n_saved_fp_regs = 1;
1853 }
1854 else
1855 {
1856 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1857 n_saved_fp_regs = 2;
1858 else
1859 n_saved_fp_regs = 4;
1860 }
1861
1862 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1863 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1864 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1865 {
1866 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1867 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1868 regs[fp_start_reg++]);
1869 }
1870 }
1871 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1872 {
1873 /* Allow some special function calls when skipping the
1874 prologue; GCC generates these before storing arguments to
1875 the stack. */
1876 CORE_ADDR dest = BranchDest (current_pc, insn);
1877
1878 if (skip_prologue_function (gdbarch, dest, 0))
1879 continue;
1880 else
1881 break;
1882 }
1883 else if ((insn & 0xf0000000) != 0xe0000000)
1884 break; /* Condition not true, exit early. */
1885 else if (arm_instruction_changes_pc (insn))
1886 /* Don't scan past anything that might change control flow. */
1887 break;
1888 else if (arm_instruction_restores_sp (insn))
1889 {
1890 /* Don't scan past the epilogue. */
1891 break;
1892 }
1893 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1894 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1895 /* Ignore block loads from the stack, potentially copying
1896 parameters from memory. */
1897 continue;
1898 else if ((insn & 0xfc500000) == 0xe4100000
1899 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1900 /* Similarly ignore single loads from the stack. */
1901 continue;
1902 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1903 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1904 register instead of the stack. */
1905 continue;
1906 else
1907 {
1908 /* The optimizer might shove anything into the prologue,
1909 so we just skip what we don't recognize. */
1910 unrecognized_pc = current_pc;
1911 continue;
1912 }
1913 }
1914
1915 if (unrecognized_pc == 0)
1916 unrecognized_pc = current_pc;
1917
1918 if (cache)
1919 {
1920 int framereg, framesize;
1921
1922 /* The frame size is just the distance from the frame register
1923 to the original stack pointer. */
1924 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1925 {
1926 /* Frame pointer is fp. */
1927 framereg = ARM_FP_REGNUM;
1928 framesize = -regs[ARM_FP_REGNUM].k;
1929 }
1930 else
1931 {
1932 /* Try the stack pointer... this is a bit desperate. */
1933 framereg = ARM_SP_REGNUM;
1934 framesize = -regs[ARM_SP_REGNUM].k;
1935 }
1936
1937 cache->framereg = framereg;
1938 cache->framesize = framesize;
1939
1940 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1941 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1942 cache->saved_regs[regno].addr = offset;
1943 }
1944
1945 if (arm_debug)
1946 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1947 paddress (gdbarch, unrecognized_pc));
1948
1949 do_cleanups (back_to);
1950 return unrecognized_pc;
1951 }
1952
1953 static void
1954 arm_scan_prologue (struct frame_info *this_frame,
1955 struct arm_prologue_cache *cache)
1956 {
1957 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1958 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1959 int regno;
1960 CORE_ADDR prologue_start, prologue_end, current_pc;
1961 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1962 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1963 pv_t regs[ARM_FPS_REGNUM];
1964 struct pv_area *stack;
1965 struct cleanup *back_to;
1966 CORE_ADDR offset;
1967
1968 /* Assume there is no frame until proven otherwise. */
1969 cache->framereg = ARM_SP_REGNUM;
1970 cache->framesize = 0;
1971
1972 /* Check for Thumb prologue. */
1973 if (arm_frame_is_thumb (this_frame))
1974 {
1975 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1976 return;
1977 }
1978
1979 /* Find the function prologue. If we can't find the function in
1980 the symbol table, peek in the stack frame to find the PC. */
1981 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1982 &prologue_end))
1983 {
1984 /* One way to find the end of the prologue (which works well
1985 for unoptimized code) is to do the following:
1986
1987 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1988
1989 if (sal.line == 0)
1990 prologue_end = prev_pc;
1991 else if (sal.end < prologue_end)
1992 prologue_end = sal.end;
1993
1994 This mechanism is very accurate so long as the optimizer
1995 doesn't move any instructions from the function body into the
1996 prologue. If this happens, sal.end will be the last
1997 instruction in the first hunk of prologue code just before
1998 the first instruction that the scheduler has moved from
1999 the body to the prologue.
2000
2001 In order to make sure that we scan all of the prologue
2002 instructions, we use a slightly less accurate mechanism which
2003 may scan more than necessary. To help compensate for this
2004 lack of accuracy, the prologue scanning loop below contains
2005 several clauses which'll cause the loop to terminate early if
2006 an implausible prologue instruction is encountered.
2007
2008 The expression
2009
2010 prologue_start + 64
2011
2012 is a suitable endpoint since it accounts for the largest
2013 possible prologue plus up to five instructions inserted by
2014 the scheduler. */
2015
2016 if (prologue_end > prologue_start + 64)
2017 {
2018 prologue_end = prologue_start + 64; /* See above. */
2019 }
2020 }
2021 else
2022 {
2023 /* We have no symbol information. Our only option is to assume this
2024 function has a standard stack frame and the normal frame register.
2025 Then, we can find the value of our frame pointer on entrance to
2026 the callee (or at the present moment if this is the innermost frame).
2027 The value stored there should be the address of the stmfd + 8. */
2028 CORE_ADDR frame_loc;
2029 LONGEST return_value;
2030
2031 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
2032 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
2033 return;
2034 else
2035 {
2036 prologue_start = gdbarch_addr_bits_remove
2037 (gdbarch, return_value) - 8;
2038 prologue_end = prologue_start + 64; /* See above. */
2039 }
2040 }
2041
2042 if (prev_pc < prologue_end)
2043 prologue_end = prev_pc;
2044
2045 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
2046 }
2047
2048 static struct arm_prologue_cache *
2049 arm_make_prologue_cache (struct frame_info *this_frame)
2050 {
2051 int reg;
2052 struct arm_prologue_cache *cache;
2053 CORE_ADDR unwound_fp;
2054
2055 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2056 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2057
2058 arm_scan_prologue (this_frame, cache);
2059
2060 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2061 if (unwound_fp == 0)
2062 return cache;
2063
2064 cache->prev_sp = unwound_fp + cache->framesize;
2065
2066 /* Calculate actual addresses of saved registers using offsets
2067 determined by arm_scan_prologue. */
2068 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2069 if (trad_frame_addr_p (cache->saved_regs, reg))
2070 cache->saved_regs[reg].addr += cache->prev_sp;
2071
2072 return cache;
2073 }
2074
2075 /* Our frame ID for a normal frame is the current function's starting PC
2076 and the caller's SP when we were called. */
2077
2078 static void
2079 arm_prologue_this_id (struct frame_info *this_frame,
2080 void **this_cache,
2081 struct frame_id *this_id)
2082 {
2083 struct arm_prologue_cache *cache;
2084 struct frame_id id;
2085 CORE_ADDR pc, func;
2086
2087 if (*this_cache == NULL)
2088 *this_cache = arm_make_prologue_cache (this_frame);
2089 cache = *this_cache;
2090
2091 /* This is meant to halt the backtrace at "_start". */
2092 pc = get_frame_pc (this_frame);
2093 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2094 return;
2095
2096 /* If we've hit a wall, stop. */
2097 if (cache->prev_sp == 0)
2098 return;
2099
2100 /* Use function start address as part of the frame ID. If we cannot
2101 identify the start address (due to missing symbol information),
2102 fall back to just using the current PC. */
2103 func = get_frame_func (this_frame);
2104 if (!func)
2105 func = pc;
2106
2107 id = frame_id_build (cache->prev_sp, func);
2108 *this_id = id;
2109 }
2110
2111 static struct value *
2112 arm_prologue_prev_register (struct frame_info *this_frame,
2113 void **this_cache,
2114 int prev_regnum)
2115 {
2116 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2117 struct arm_prologue_cache *cache;
2118
2119 if (*this_cache == NULL)
2120 *this_cache = arm_make_prologue_cache (this_frame);
2121 cache = *this_cache;
2122
2123 /* If we are asked to unwind the PC, then we need to return the LR
2124 instead. The prologue may save PC, but it will point into this
2125 frame's prologue, not the next frame's resume location. Also
2126 strip the saved T bit. A valid LR may have the low bit set, but
2127 a valid PC never does. */
2128 if (prev_regnum == ARM_PC_REGNUM)
2129 {
2130 CORE_ADDR lr;
2131
2132 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2133 return frame_unwind_got_constant (this_frame, prev_regnum,
2134 arm_addr_bits_remove (gdbarch, lr));
2135 }
2136
2137 /* SP is generally not saved to the stack, but this frame is
2138 identified by the next frame's stack pointer at the time of the call.
2139 The value was already reconstructed into PREV_SP. */
2140 if (prev_regnum == ARM_SP_REGNUM)
2141 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2142
2143 /* The CPSR may have been changed by the call instruction and by the
2144 called function. The only bit we can reconstruct is the T bit,
2145 by checking the low bit of LR as of the call. This is a reliable
2146 indicator of Thumb-ness except for some ARM v4T pre-interworking
2147 Thumb code, which could get away with a clear low bit as long as
2148 the called function did not use bx. Guess that all other
2149 bits are unchanged; the condition flags are presumably lost,
2150 but the processor status is likely valid. */
2151 if (prev_regnum == ARM_PS_REGNUM)
2152 {
2153 CORE_ADDR lr, cpsr;
2154 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2155
2156 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2157 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2158 if (IS_THUMB_ADDR (lr))
2159 cpsr |= t_bit;
2160 else
2161 cpsr &= ~t_bit;
2162 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2163 }
2164
2165 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2166 prev_regnum);
2167 }
2168
/* Unwinder for normal frames based on prologue analysis.  Frame IDs and
   previous-register values come from the cache built by
   arm_make_prologue_cache.  */

struct frame_unwind arm_prologue_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  default_frame_sniffer
};
2177
2178 /* Maintain a list of ARM exception table entries per objfile, similar to the
2179 list of mapping symbols. We only cache entries for standard ARM-defined
2180 personality routines; the cache will contain only the frame unwinding
2181 instructions associated with the entry (not the descriptors). */
2182
/* Per-objfile key under which the parsed exception table cache below
   is registered (see arm_exidx_new_objfile).  */
static const struct objfile_data *arm_exidx_data_key;

/* One cached exception table entry: ADDR is the section-relative start
   address of the region the entry covers, and ENTRY points to the
   normalized unwind instruction list (NULL when there is none).  */
struct arm_exidx_entry
{
  bfd_vma addr;
  gdb_byte *entry;
};
typedef struct arm_exidx_entry arm_exidx_entry_s;
DEF_VEC_O(arm_exidx_entry_s);

/* Per-objfile cache: one vector of entries per BFD section, indexed by
   the section's index.  */
struct arm_exidx_data
{
  VEC(arm_exidx_entry_s) **section_maps;
};
2197
2198 static void
2199 arm_exidx_data_free (struct objfile *objfile, void *arg)
2200 {
2201 struct arm_exidx_data *data = arg;
2202 unsigned int i;
2203
2204 for (i = 0; i < objfile->obfd->section_count; i++)
2205 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2206 }
2207
2208 static inline int
2209 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2210 const struct arm_exidx_entry *rhs)
2211 {
2212 return lhs->addr < rhs->addr;
2213 }
2214
2215 static struct obj_section *
2216 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2217 {
2218 struct obj_section *osect;
2219
2220 ALL_OBJFILE_OSECTIONS (objfile, osect)
2221 if (bfd_get_section_flags (objfile->obfd,
2222 osect->the_bfd_section) & SEC_ALLOC)
2223 {
2224 bfd_vma start, size;
2225 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2226 size = bfd_get_section_size (osect->the_bfd_section);
2227
2228 if (start <= vma && vma < start + size)
2229 return osect;
2230 }
2231
2232 return NULL;
2233 }
2234
2235 /* Parse contents of exception table and exception index sections
2236 of OBJFILE, and fill in the exception table entry cache.
2237
2238 For each entry that refers to a standard ARM-defined personality
2239 routine, extract the frame unwinding instructions (from either
2240 the index or the table section). The unwinding instructions
2241 are normalized by:
2242 - extracting them from the rest of the table data
2243 - converting to host endianness
2244 - appending the implicit 0xb0 ("Finish") code
2245
2246 The extracted and normalized instructions are stored for later
2247 retrieval by the arm_find_exidx_entry routine. */
2248
static void
arm_exidx_new_objfile (struct objfile *objfile)
{
  struct cleanup *cleanups;
  struct arm_exidx_data *data;
  asection *exidx, *extab;
  bfd_vma exidx_vma = 0, extab_vma = 0;
  bfd_size_type exidx_size = 0, extab_size = 0;
  gdb_byte *exidx_data = NULL, *extab_data = NULL;
  LONGEST i;

  /* If we've already touched this file, do nothing.  */
  if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
    return;
  cleanups = make_cleanup (null_cleanup, NULL);

  /* Read contents of exception table and index.  */
  exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
  if (exidx)
    {
      exidx_vma = bfd_section_vma (objfile->obfd, exidx);
      exidx_size = bfd_get_section_size (exidx);
      exidx_data = xmalloc (exidx_size);
      make_cleanup (xfree, exidx_data);

      if (!bfd_get_section_contents (objfile->obfd, exidx,
				     exidx_data, 0, exidx_size))
	{
	  do_cleanups (cleanups);
	  return;
	}
    }

  extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
  if (extab)
    {
      extab_vma = bfd_section_vma (objfile->obfd, extab);
      extab_size = bfd_get_section_size (extab);
      extab_data = xmalloc (extab_size);
      make_cleanup (xfree, extab_data);

      if (!bfd_get_section_contents (objfile->obfd, extab,
				     extab_data, 0, extab_size))
	{
	  do_cleanups (cleanups);
	  return;
	}
    }

  /* Allocate exception table data structure.  */
  data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
  set_objfile_data (objfile, arm_exidx_data_key, data);
  data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
				       objfile->obfd->section_count,
				       VEC(arm_exidx_entry_s) *);

  /* Fill in exception table.  Each index entry is a pair of 32-bit
     words: a self-relative offset to the function start, and either an
     inline unwind description or an offset to one in .ARM.extab.  */
  for (i = 0; i < exidx_size / 8; i++)
    {
      struct arm_exidx_entry new_exidx_entry;
      bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
      bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
      bfd_vma addr = 0, word = 0;
      int n_bytes = 0, n_words = 0;
      struct obj_section *sec;
      gdb_byte *entry = NULL;

      /* Extract address of start of function.  The XOR/subtract pair
	 sign-extends the 31-bit self-relative offset.  */
      idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
      idx += exidx_vma + i * 8;

      /* Find section containing function and compute section offset.  */
      sec = arm_obj_section_from_vma (objfile, idx);
      if (sec == NULL)
	continue;
      idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);

      /* Determine address of exception table entry.  */
      if (val == 1)
	{
	  /* EXIDX_CANTUNWIND -- no exception table entry present.  */
	}
      else if ((val & 0xff000000) == 0x80000000)
	{
	  /* Exception table entry embedded in .ARM.exidx
	     -- must be short form.  */
	  word = val;
	  n_bytes = 3;
	}
      else if (!(val & 0x80000000))
	{
	  /* Exception table entry in .ARM.extab.  Sign-extend the
	     31-bit self-relative offset as above.  */
	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
	  addr += exidx_vma + i * 8 + 4;

	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data + addr - extab_vma);
	      addr += 4;

	      if ((word & 0xff000000) == 0x80000000)
		{
		  /* Short form.  */
		  n_bytes = 3;
		}
	      else if ((word & 0xff000000) == 0x81000000
		       || (word & 0xff000000) == 0x82000000)
		{
		  /* Long form.  */
		  n_bytes = 2;
		  n_words = ((word >> 16) & 0xff);
		}
	      else if (!(word & 0x80000000))
		{
		  bfd_vma pers;
		  struct obj_section *pers_sec;
		  int gnu_personality = 0;

		  /* Custom personality routine.  */
		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);

		  /* Check whether we've got one of the variants of the
		     GNU personality routines.  */
		  pers_sec = arm_obj_section_from_vma (objfile, pers);
		  if (pers_sec)
		    {
		      static const char *personality[] =
			{
			  "__gcc_personality_v0",
			  "__gxx_personality_v0",
			  "__gcj_personality_v0",
			  "__gnu_objc_personality_v0",
			  NULL
			};

		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
		      int k;

		      for (k = 0; personality[k]; k++)
			if (lookup_minimal_symbol_by_pc_name
			      (pc, personality[k], objfile))
			  {
			    gnu_personality = 1;
			    break;
			  }
		    }

		  /* If so, the next word contains a word count in the high
		     byte, followed by the same unwind instructions as the
		     pre-defined forms.  */
		  if (gnu_personality
		      && addr + 4 <= extab_vma + extab_size)
		    {
		      word = bfd_h_get_32 (objfile->obfd,
					   extab_data + addr - extab_vma);
		      addr += 4;
		      n_bytes = 3;
		      n_words = ((word >> 24) & 0xff);
		    }
		}
	    }
	}

      /* Sanity check address: discard the entry if the trailing words
	 would fall outside the .ARM.extab section.  */
      if (n_words)
	if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
	  n_words = n_bytes = 0;

      /* The unwind instructions reside in WORD (only the N_BYTES least
	 significant bytes are valid), followed by N_WORDS words in the
	 extab section starting at ADDR.  */
      if (n_bytes || n_words)
	{
	  gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
					       n_bytes + n_words * 4 + 1);

	  /* Copy bytes most-significant first (host order).  */
	  while (n_bytes--)
	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);

	  while (n_words--)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data + addr - extab_vma);
	      addr += 4;

	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
	      *p++ = (gdb_byte) (word & 0xff);
	    }

	  /* Implied "Finish" to terminate the list.  */
	  *p++ = 0xb0;
	}

      /* Push entry onto vector.  They are guaranteed to always
	 appear in order of increasing addresses.  */
      new_exidx_entry.addr = idx;
      new_exidx_entry.entry = entry;
      VEC_safe_push (arm_exidx_entry_s,
		     data->section_maps[sec->the_bfd_section->index],
		     &new_exidx_entry);
    }

  do_cleanups (cleanups);
}
2457
2458 /* Search for the exception table entry covering MEMADDR. If one is found,
2459 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2460 set *START to the start of the region covered by this entry. */
2461
2462 static gdb_byte *
2463 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2464 {
2465 struct obj_section *sec;
2466
2467 sec = find_pc_section (memaddr);
2468 if (sec != NULL)
2469 {
2470 struct arm_exidx_data *data;
2471 VEC(arm_exidx_entry_s) *map;
2472 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2473 unsigned int idx;
2474
2475 data = objfile_data (sec->objfile, arm_exidx_data_key);
2476 if (data != NULL)
2477 {
2478 map = data->section_maps[sec->the_bfd_section->index];
2479 if (!VEC_empty (arm_exidx_entry_s, map))
2480 {
2481 struct arm_exidx_entry *map_sym;
2482
2483 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2484 arm_compare_exidx_entries);
2485
2486 /* VEC_lower_bound finds the earliest ordered insertion
2487 point. If the following symbol starts at this exact
2488 address, we use that; otherwise, the preceding
2489 exception table entry covers this address. */
2490 if (idx < VEC_length (arm_exidx_entry_s, map))
2491 {
2492 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2493 if (map_sym->addr == map_key.addr)
2494 {
2495 if (start)
2496 *start = map_sym->addr + obj_section_addr (sec);
2497 return map_sym->entry;
2498 }
2499 }
2500
2501 if (idx > 0)
2502 {
2503 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2504 if (start)
2505 *start = map_sym->addr + obj_section_addr (sec);
2506 return map_sym->entry;
2507 }
2508 }
2509 }
2510 }
2511
2512 return NULL;
2513 }
2514
2515 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2516 instruction list from the ARM exception table entry ENTRY, allocate and
2517 return a prologue cache structure describing how to unwind this frame.
2518
2519 Return NULL if the unwinding instruction list contains a "spare",
2520 "reserved" or "refuse to unwind" instruction as defined in section
2521 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2522 for the ARM Architecture" document. */
2523
static struct arm_prologue_cache *
arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
{
  CORE_ADDR vsp = 0;
  int vsp_valid = 0;

  struct arm_prologue_cache *cache;
  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  for (;;)
    {
      gdb_byte insn;

      /* Whenever we reload SP, we actually have to retrieve its
	 actual value in the current frame.  */
      if (!vsp_valid)
	{
	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
	    {
	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
	      vsp = get_frame_register_unsigned (this_frame, reg);
	    }
	  else
	    {
	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
	    }

	  vsp_valid = 1;
	}

      /* Decode next unwind instruction.  */
      insn = *entry++;

      if ((insn & 0xc0) == 0)
	{
	  /* 00xxxxxx: vsp = vsp + (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp += (offset << 2) + 4;
	}
      else if ((insn & 0xc0) == 0x40)
	{
	  /* 01xxxxxx: vsp = vsp - (xxxxxx << 2) - 4.  */
	  int offset = insn & 0x3f;
	  vsp -= (offset << 2) + 4;
	}
      else if ((insn & 0xf0) == 0x80)
	{
	  /* 1000iiii iiiiiiii: a 12-bit register mask follows in the
	     next byte.  */
	  int mask = ((insn & 0xf) << 8) | *entry++;
	  int i;

	  /* The special case of an all-zero mask identifies
	     "Refuse to unwind".  We return NULL to fall back
	     to the prologue analyzer.  */
	  if (mask == 0)
	    return NULL;

	  /* Pop registers r4..r15 under mask.  */
	  for (i = 0; i < 12; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[4 + i].addr = vsp;
		vsp += 4;
	      }

	  /* Special-case popping SP -- we need to reload vsp.  */
	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
	    vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0x90)
	{
	  int reg = insn & 0xf;

	  /* Reserved cases.  */
	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
	    return NULL;

	  /* Set SP from another register and mark VSP for reload.  */
	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
	  vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0xa0)
	{
	  int count = insn & 0x7;
	  int pop_lr = (insn & 0x8) != 0;
	  int i;

	  /* Pop r4..r[4+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[4 + i].addr = vsp;
	      vsp += 4;
	    }

	  /* If indicated by flag, pop LR as well.  */
	  if (pop_lr)
	    {
	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
	      vsp += 4;
	    }
	}
      else if (insn == 0xb0)
	{
	  /* We could only have updated PC by popping into it; if so, it
	     will show up as address.  Otherwise, copy LR into PC.  */
	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
	    cache->saved_regs[ARM_PC_REGNUM]
	      = cache->saved_regs[ARM_LR_REGNUM];

	  /* We're done.  */
	  break;
	}
      else if (insn == 0xb1)
	{
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop r0..r3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[i].addr = vsp;
		vsp += 4;
	      }
	}
      else if (insn == 0xb2)
	{
	  /* Large stack adjustment: a variable-length operand encoded
	     7 bits per byte, least-significant group first, with the
	     high bit of each byte flagging continuation.  */
	  ULONGEST offset = 0;
	  unsigned shift = 0;

	  do
	    {
	      offset |= (*entry & 0x7f) << shift;
	      shift += 7;
	    }
	  while (*entry++ & 0x80);

	  vsp += 0x204 + (offset << 2);
	}
      else if (insn == 0xb3)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D15 are valid here.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if ((insn & 0xf8) == 0xb8)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if (insn == 0xc6)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers WR0..WR15 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc7)
	{
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
		vsp += 4;
	      }
	}
      else if ((insn & 0xf8) == 0xc0)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop iwmmx registers WR[10]..WR[10+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc8)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D31 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers
	     D[16+start]..D[16+start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc9)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if ((insn & 0xf8) == 0xd0)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else
	{
	  /* Everything else is "spare".  */
	  return NULL;
	}
    }

  /* If we restore SP from a register, assume this was the frame register.
     Otherwise just fall back to SP as frame register.  */
  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
    cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
  else
    cache->framereg = ARM_SP_REGNUM;

  /* Determine offset to previous frame.  */
  cache->framesize
    = vsp - get_frame_register_unsigned (this_frame, cache->framereg);

  /* We already got the previous SP.  */
  cache->prev_sp = vsp;

  return cache;
}
2813
2814 /* Unwinding via ARM exception table entries. Note that the sniffer
2815 already computes a filled-in prologue cache, which is then used
2816 with the same arm_prologue_this_id and arm_prologue_prev_register
2817 routines also used for prologue-parsing based unwinding. */
2818
static int
arm_exidx_unwind_sniffer (const struct frame_unwind *self,
			  struct frame_info *this_frame,
			  void **this_prologue_cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR addr_in_block, exidx_region, func_start;
  struct arm_prologue_cache *cache;
  gdb_byte *entry;

  /* See if we have an ARM exception table entry covering this address.  */
  addr_in_block = get_frame_address_in_block (this_frame);
  entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
  if (!entry)
    return 0;

  /* The ARM exception table does not describe unwind information
     for arbitrary PC values, but is guaranteed to be correct only
     at call sites.  We have to decide here whether we want to use
     ARM exception table information for this frame, or fall back
     to using prologue parsing.  (Note that if we have DWARF CFI,
     this sniffer isn't even called -- CFI is always preferred.)

     Before we make this decision, however, we check whether we
     actually have *symbol* information for the current frame.
     If not, prologue parsing would not work anyway, so we might
     as well use the exception table and hope for the best.  */
  if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
    {
      int exc_valid = 0;

      /* If the next frame is "normal", we are at a call site in this
	 frame, so exception information is guaranteed to be valid.  */
      if (get_next_frame (this_frame)
	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
	exc_valid = 1;

      /* We also assume exception information is valid if we're currently
	 blocked in a system call.  The system library is supposed to
	 ensure this, so that e.g. pthread cancellation works.  */
      if (arm_frame_is_thumb (this_frame))
	{
	  LONGEST insn;

	  /* Check for a 16-bit Thumb "svc" just before PC.  */
	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
					byte_order_for_code, &insn)
	      && (insn & 0xff00) == 0xdf00 /* svc */)
	    exc_valid = 1;
	}
      else
	{
	  LONGEST insn;

	  /* Check for a 32-bit ARM "svc" just before PC.  */
	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
					byte_order_for_code, &insn)
	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
	    exc_valid = 1;
	}

      /* Bail out if we don't know that exception information is valid.  */
      if (!exc_valid)
	return 0;

      /* The ARM exception index does not mark the *end* of the region
	 covered by the entry, and some functions will not have any entry.
	 To correctly recognize the end of the covered region, the linker
	 should have inserted dummy records with a CANTUNWIND marker.

	 Unfortunately, current versions of GNU ld do not reliably do
	 this, and thus we may have found an incorrect entry above.
	 As a (temporary) sanity check, we only use the entry if it
	 lies *within* the bounds of the function.  Note that this check
	 might reject perfectly valid entries that just happen to cover
	 multiple functions; therefore this check ought to be removed
	 once the linker is fixed.  */
      if (func_start > exidx_region)
	return 0;
    }

  /* Decode the list of unwinding instructions into a prologue cache.
     Note that this may fail due to e.g. a "refuse to unwind" code.  */
  cache = arm_exidx_fill_cache (this_frame, entry);
  if (!cache)
    return 0;

  *this_prologue_cache = cache;
  return 1;
}
2908
/* Unwinder based on ARM exception table entries.  It shares the
   frame-ID and previous-register hooks with the prologue-analysis
   unwinder; only the sniffer (which fills in the cache) differs.  */

struct frame_unwind arm_exidx_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_exidx_unwind_sniffer
};
2917
2918 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2919 trampoline, return the target PC. Otherwise return 0.
2920
2921 void call0a (char c, short s, int i, long l) {}
2922
2923 int main (void)
2924 {
2925 (*pointer_to_call0a) (c, s, i, l);
2926 }
2927
2928 Instead of calling a stub library function _call_via_xx (xx is
2929 the register name), GCC may inline the trampoline in the object
2930 file as below (register r2 has the address of call0a).
2931
2932 .global main
2933 .type main, %function
2934 ...
2935 bl .L1
2936 ...
2937 .size main, .-main
2938
2939 .L1:
2940 bx r2
2941
2942 The trampoline 'bx r2' doesn't belong to main. */
2943
2944 static CORE_ADDR
2945 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2946 {
2947 /* The heuristics of recognizing such trampoline is that FRAME is
2948 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2949 if (arm_frame_is_thumb (frame))
2950 {
2951 gdb_byte buf[2];
2952
2953 if (target_read_memory (pc, buf, 2) == 0)
2954 {
2955 struct gdbarch *gdbarch = get_frame_arch (frame);
2956 enum bfd_endian byte_order_for_code
2957 = gdbarch_byte_order_for_code (gdbarch);
2958 uint16_t insn
2959 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2960
2961 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2962 {
2963 CORE_ADDR dest
2964 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2965
2966 /* Clear the LSB so that gdb core sets step-resume
2967 breakpoint at the right address. */
2968 return UNMAKE_THUMB_ADDR (dest);
2969 }
2970 }
2971 }
2972
2973 return 0;
2974 }
2975
2976 static struct arm_prologue_cache *
2977 arm_make_stub_cache (struct frame_info *this_frame)
2978 {
2979 struct arm_prologue_cache *cache;
2980
2981 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2982 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2983
2984 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2985
2986 return cache;
2987 }
2988
2989 /* Our frame ID for a stub frame is the current SP and LR. */
2990
2991 static void
2992 arm_stub_this_id (struct frame_info *this_frame,
2993 void **this_cache,
2994 struct frame_id *this_id)
2995 {
2996 struct arm_prologue_cache *cache;
2997
2998 if (*this_cache == NULL)
2999 *this_cache = arm_make_stub_cache (this_frame);
3000 cache = *this_cache;
3001
3002 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
3003 }
3004
3005 static int
3006 arm_stub_unwind_sniffer (const struct frame_unwind *self,
3007 struct frame_info *this_frame,
3008 void **this_prologue_cache)
3009 {
3010 CORE_ADDR addr_in_block;
3011 gdb_byte dummy[4];
3012 CORE_ADDR pc, start_addr;
3013 const char *name;
3014
3015 addr_in_block = get_frame_address_in_block (this_frame);
3016 pc = get_frame_pc (this_frame);
3017 if (in_plt_section (addr_in_block)
3018 /* We also use the stub winder if the target memory is unreadable
3019 to avoid having the prologue unwinder trying to read it. */
3020 || target_read_memory (pc, dummy, 4) != 0)
3021 return 1;
3022
3023 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
3024 && arm_skip_bx_reg (this_frame, pc) != 0)
3025 return 1;
3026
3027 return 0;
3028 }
3029
/* Unwinder for stub frames (PLT entries, unreadable code, and Thumb
   'bx Rm' trampolines), selected by arm_stub_unwind_sniffer.  */

struct frame_unwind arm_stub_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_stub_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_stub_unwind_sniffer
};
3038
3039 /* Put here the code to store, into CACHE->saved_regs, the addresses
3040 of the saved registers of frame described by THIS_FRAME. CACHE is
3041 returned. */
3042
3043 static struct arm_prologue_cache *
3044 arm_m_exception_cache (struct frame_info *this_frame)
3045 {
3046 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3047 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3048 struct arm_prologue_cache *cache;
3049 CORE_ADDR unwound_sp;
3050 LONGEST xpsr;
3051
3052 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3053 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3054
3055 unwound_sp = get_frame_register_unsigned (this_frame,
3056 ARM_SP_REGNUM);
3057
3058 /* The hardware saves eight 32-bit words, comprising xPSR,
3059 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3060 "B1.5.6 Exception entry behavior" in
3061 "ARMv7-M Architecture Reference Manual". */
3062 cache->saved_regs[0].addr = unwound_sp;
3063 cache->saved_regs[1].addr = unwound_sp + 4;
3064 cache->saved_regs[2].addr = unwound_sp + 8;
3065 cache->saved_regs[3].addr = unwound_sp + 12;
3066 cache->saved_regs[12].addr = unwound_sp + 16;
3067 cache->saved_regs[14].addr = unwound_sp + 20;
3068 cache->saved_regs[15].addr = unwound_sp + 24;
3069 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3070
3071 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3072 aligner between the top of the 32-byte stack frame and the
3073 previous context's stack pointer. */
3074 cache->prev_sp = unwound_sp + 32;
3075 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3076 && (xpsr & (1 << 9)) != 0)
3077 cache->prev_sp += 4;
3078
3079 return cache;
3080 }
3081
3082 /* Implementation of function hook 'this_id' in
3083 'struct frame_uwnind'. */
3084
3085 static void
3086 arm_m_exception_this_id (struct frame_info *this_frame,
3087 void **this_cache,
3088 struct frame_id *this_id)
3089 {
3090 struct arm_prologue_cache *cache;
3091
3092 if (*this_cache == NULL)
3093 *this_cache = arm_m_exception_cache (this_frame);
3094 cache = *this_cache;
3095
3096 /* Our frame ID for a stub frame is the current SP and LR. */
3097 *this_id = frame_id_build (cache->prev_sp,
3098 get_frame_pc (this_frame));
3099 }
3100
3101 /* Implementation of function hook 'prev_register' in
3102 'struct frame_uwnind'. */
3103
3104 static struct value *
3105 arm_m_exception_prev_register (struct frame_info *this_frame,
3106 void **this_cache,
3107 int prev_regnum)
3108 {
3109 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3110 struct arm_prologue_cache *cache;
3111
3112 if (*this_cache == NULL)
3113 *this_cache = arm_m_exception_cache (this_frame);
3114 cache = *this_cache;
3115
3116 /* The value was already reconstructed into PREV_SP. */
3117 if (prev_regnum == ARM_SP_REGNUM)
3118 return frame_unwind_got_constant (this_frame, prev_regnum,
3119 cache->prev_sp);
3120
3121 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3122 prev_regnum);
3123 }
3124
/* Implementation of the 'sniffer' hook in 'struct frame_unwind' for
   M-profile exception frames.  */

static int
arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
				struct frame_info *this_frame,
				void **this_prologue_cache)
{
  /* No need to check is_m; this sniffer is only registered for
     M-profile architectures.

     Exception frames return to one of these magic PCs.  Other values
     are not defined as of v7-M.  See details in "B1.5.8 Exception
     return behavior" in "ARMv7-M Architecture Reference Manual".  */
  switch (get_frame_pc (this_frame))
    {
    case 0xfffffff1:
    case 0xfffffff9:
    case 0xfffffffd:
      return 1;

    default:
      return 0;
    }
}
3147
/* Frame unwinder for M-profile exceptions.  Marked SIGTRAMP_FRAME
   because an exception frame is not a normal call frame.  */

struct frame_unwind arm_m_exception_unwind =
{
  SIGTRAMP_FRAME,
  default_frame_unwind_stop_reason,
  arm_m_exception_this_id,
  arm_m_exception_prev_register,
  NULL,
  arm_m_exception_unwind_sniffer
};
3159
3160 static CORE_ADDR
3161 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3162 {
3163 struct arm_prologue_cache *cache;
3164
3165 if (*this_cache == NULL)
3166 *this_cache = arm_make_prologue_cache (this_frame);
3167 cache = *this_cache;
3168
3169 return cache->prev_sp - cache->framesize;
3170 }
3171
/* Frame base handler for prologue-analyzed frames; the same address
   is used for the frame base, locals and arguments.  */

struct frame_base arm_normal_base = {
  &arm_prologue_unwind,
  arm_normal_frame_base,
  arm_normal_frame_base,
  arm_normal_frame_base
};
3178
3179 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3180 dummy frame. The frame ID's base needs to match the TOS value
3181 saved by save_dummy_frame_tos() and returned from
3182 arm_push_dummy_call, and the PC needs to match the dummy frame's
3183 breakpoint. */
3184
3185 static struct frame_id
3186 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3187 {
3188 return frame_id_build (get_frame_register_unsigned (this_frame,
3189 ARM_SP_REGNUM),
3190 get_frame_pc (this_frame));
3191 }
3192
3193 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3194 be used to construct the previous frame's ID, after looking up the
3195 containing function). */
3196
3197 static CORE_ADDR
3198 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3199 {
3200 CORE_ADDR pc;
3201 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3202 return arm_addr_bits_remove (gdbarch, pc);
3203 }
3204
3205 static CORE_ADDR
3206 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3207 {
3208 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3209 }
3210
/* DWARF2 unwind hook (installed by arm_dwarf2_frame_init_reg) used to
   reconstruct the values of PC and CPSR in the previous frame, which
   the DWARF call-frame information does not describe directly.  */

static struct value *
arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
			  int regnum)
{
  struct gdbarch * gdbarch = get_frame_arch (this_frame);
  CORE_ADDR lr, cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

  switch (regnum)
    {
    case ARM_PC_REGNUM:
      /* The PC is normally copied from the return column, which
	 describes saves of LR.  However, that version may have an
	 extra bit set to indicate Thumb state.  The bit is not
	 part of the PC.  */
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, regnum,
					arm_addr_bits_remove (gdbarch, lr));

    case ARM_PS_REGNUM:
      /* Reconstruct the T bit; see arm_prologue_prev_register for details.
	 Note the CPSR base value is read in THIS frame, while the T bit
	 comes from the unwound LR.  */
      cpsr = get_frame_register_unsigned (this_frame, regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
	cpsr |= t_bit;
      else
	cpsr &= ~t_bit;
      return frame_unwind_got_constant (this_frame, regnum, cpsr);

    default:
      /* Only PC and CPSR are ever routed here; anything else is a
	 caller bug.  */
      internal_error (__FILE__, __LINE__,
		      _("Unexpected register %d"), regnum);
    }
}
3245
3246 static void
3247 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3248 struct dwarf2_frame_state_reg *reg,
3249 struct frame_info *this_frame)
3250 {
3251 switch (regnum)
3252 {
3253 case ARM_PC_REGNUM:
3254 case ARM_PS_REGNUM:
3255 reg->how = DWARF2_FRAME_REG_FN;
3256 reg->loc.fn = arm_dwarf2_prev_register;
3257 break;
3258 case ARM_SP_REGNUM:
3259 reg->how = DWARF2_FRAME_REG_CFA;
3260 break;
3261 }
3262 }
3263
/* Return true if we are in the function's epilogue, i.e. after the
   instruction that destroyed the function's stack frame.  Thumb
   version; PC is assumed to lie in Thumb code.  */

static int
thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn, insn2;
  int found_return = 0, found_stack_adjust = 0;
  CORE_ADDR func_start, func_end;
  CORE_ADDR scan_pc;
  gdb_byte buf[4];

  /* Without function bounds we cannot scan; assume not in epilogue.  */
  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* The epilogue is a sequence of instructions along the following lines:

    - add stack frame size to SP or FP
    - [if frame pointer used] restore SP from FP
    - restore registers from SP [may include PC]
    - a return-type instruction [if PC wasn't already restored]

    In a first pass, we scan forward from the current PC and verify the
    instructions we find as compatible with this sequence, ending in a
    return instruction.

    However, this is not sufficient to distinguish indirect function calls
    within a function from indirect tail calls in the epilogue in some cases.
    Therefore, if we didn't already find any SP-changing instruction during
    forward scan, we add a backward scanning heuristic to ensure we actually
    are in the epilogue.  */

  scan_pc = pc;
  while (scan_pc < func_end && !found_return)
    {
      /* Unreadable code: abandon the forward scan.  */
      if (target_read_memory (scan_pc, buf, 2))
	break;

      scan_pc += 2;
      insn = extract_unsigned_integer (buf, 2, byte_order_for_code);

      if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
	found_return = 1;
      else if (insn == 0x46f7)  /* mov pc, lr */
	found_return = 1;
      else if (thumb_instruction_restores_sp (insn))
	{
	  if ((insn & 0xff00) == 0xbd00)  /* pop <registers, PC> */
	    found_return = 1;
	}
      else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
	{
	  /* Fetch the second halfword of the 32-bit instruction.  */
	  if (target_read_memory (scan_pc, buf, 2))
	    break;

	  scan_pc += 2;
	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);

	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
	    {
	      if (insn2 & 0x8000)  /* <registers> include PC.  */
		found_return = 1;
	    }
	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
		   && (insn2 & 0x0fff) == 0x0b04)
	    {
	      if ((insn2 & 0xf000) == 0xf000)  /* <Rt> is PC.  */
		found_return = 1;
	    }
	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
		   && (insn2 & 0x0e00) == 0x0a00)
	    ;
	  else
	    /* Any other 32-bit instruction is not epilogue-compatible.  */
	    break;
	}
      else
	/* Any other 16-bit instruction is not epilogue-compatible.  */
	break;
    }

  if (!found_return)
    return 0;

  /* Since any instruction in the epilogue sequence, with the possible
     exception of return itself, updates the stack pointer, we need to
     scan backwards for at most one instruction.  Try either a 16-bit or
     a 32-bit instruction.  This is just a heuristic, so we do not worry
     too much about false positives.  */

  if (pc - 4 < func_start)
    return 0;
  if (target_read_memory (pc - 4, buf, 4))
    return 0;

  /* INSN is the first halfword of a possible 32-bit instruction;
     INSN2 is the immediately preceding 16-bit instruction (or the
     second halfword of the 32-bit one).  */
  insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
  insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);

  if (thumb_instruction_restores_sp (insn2))
    found_stack_adjust = 1;
  else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
    found_stack_adjust = 1;
  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
	   && (insn2 & 0x0fff) == 0x0b04)
    found_stack_adjust = 1;
  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
	   && (insn2 & 0x0e00) == 0x0a00)
    found_stack_adjust = 1;

  return found_stack_adjust;
}
3374
/* Return true if we are in the function's epilogue, i.e. after the
   instruction that destroyed the function's stack frame.  ARM (and
   dispatching to Thumb) version.  */

static int
arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn;
  int found_return;
  CORE_ADDR func_start, func_end;

  /* Thumb code is handled by the dedicated Thumb scanner.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_in_function_epilogue_p (gdbarch, pc);

  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* We are in the epilogue if the previous instruction was a stack
     adjustment and the next instruction is a possible return (bx, mov
     pc, or pop).  We could have to scan backwards to find the stack
     adjustment, or forwards to find the return, but this is a decent
     approximation.  First scan forwards.  */

  found_return = 0;
  insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
  /* Instructions with the NV condition code never execute as returns.  */
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0ffffff0) == 0x012fff10)
	/* BX.  */
	found_return = 1;
      else if ((insn & 0x0ffffff0) == 0x01a0f000)
	/* MOV PC.  */
	found_return = 1;
      else if ((insn & 0x0fff0000) == 0x08bd0000
	       && (insn & 0x0000c000) != 0)
	/* POP (LDMIA), including PC or LR.  */
	found_return = 1;
    }

  if (!found_return)
    return 0;

  /* Scan backwards.  This is just a heuristic, so do not worry about
     false positives from mode changes.  */

  if (pc < func_start + 4)
    return 0;

  insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
  if (arm_instruction_restores_sp (insn))
    return 1;

  return 0;
}
3429
3430
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.  */

struct stack_item
{
  int len;			/* Size in bytes of DATA.  */
  struct stack_item *prev;	/* Next older item, or NULL.  */
  void *data;			/* Heap-allocated copy of the contents.  */
};
3440
3441 static struct stack_item *
3442 push_stack_item (struct stack_item *prev, const void *contents, int len)
3443 {
3444 struct stack_item *si;
3445 si = xmalloc (sizeof (struct stack_item));
3446 si->data = xmalloc (len);
3447 si->len = len;
3448 si->prev = prev;
3449 memcpy (si->data, contents, len);
3450 return si;
3451 }
3452
3453 static struct stack_item *
3454 pop_stack_item (struct stack_item *si)
3455 {
3456 struct stack_item *dead = si;
3457 si = si->prev;
3458 xfree (dead->data);
3459 xfree (dead);
3460 return si;
3461 }
3462
3463
3464 /* Return the alignment (in bytes) of the given type. */
3465
3466 static int
3467 arm_type_align (struct type *t)
3468 {
3469 int n;
3470 int align;
3471 int falign;
3472
3473 t = check_typedef (t);
3474 switch (TYPE_CODE (t))
3475 {
3476 default:
3477 /* Should never happen. */
3478 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3479 return 4;
3480
3481 case TYPE_CODE_PTR:
3482 case TYPE_CODE_ENUM:
3483 case TYPE_CODE_INT:
3484 case TYPE_CODE_FLT:
3485 case TYPE_CODE_SET:
3486 case TYPE_CODE_RANGE:
3487 case TYPE_CODE_REF:
3488 case TYPE_CODE_CHAR:
3489 case TYPE_CODE_BOOL:
3490 return TYPE_LENGTH (t);
3491
3492 case TYPE_CODE_ARRAY:
3493 case TYPE_CODE_COMPLEX:
3494 /* TODO: What about vector types? */
3495 return arm_type_align (TYPE_TARGET_TYPE (t));
3496
3497 case TYPE_CODE_STRUCT:
3498 case TYPE_CODE_UNION:
3499 align = 1;
3500 for (n = 0; n < TYPE_NFIELDS (t); n++)
3501 {
3502 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3503 if (falign > align)
3504 align = falign;
3505 }
3506 return align;
3507 }
3508 }
3509
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,		/* Not (yet) classified.  */
  VFP_CPRC_SINGLE,		/* 4-byte single-precision float.  */
  VFP_CPRC_DOUBLE,		/* 8-byte double-precision float.  */
  VFP_CPRC_VEC64,		/* 8-byte (64-bit) vector.  */
  VFP_CPRC_VEC128		/* 16-byte (128-bit) vector.  */
};
3521
3522 /* The length of one element of base type B. */
3523
3524 static unsigned
3525 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3526 {
3527 switch (b)
3528 {
3529 case VFP_CPRC_SINGLE:
3530 return 4;
3531 case VFP_CPRC_DOUBLE:
3532 return 8;
3533 case VFP_CPRC_VEC64:
3534 return 8;
3535 case VFP_CPRC_VEC128:
3536 return 16;
3537 default:
3538 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3539 (int) b);
3540 }
3541 }
3542
3543 /* The character ('s', 'd' or 'q') for the type of VFP register used
3544 for passing base type B. */
3545
3546 static int
3547 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3548 {
3549 switch (b)
3550 {
3551 case VFP_CPRC_SINGLE:
3552 return 's';
3553 case VFP_CPRC_DOUBLE:
3554 return 'd';
3555 case VFP_CPRC_VEC64:
3556 return 'd';
3557 case VFP_CPRC_VEC128:
3558 return 'q';
3559 default:
3560 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3561 (int) b);
3562 }
3563 }
3564
/* Determine whether T may be part of a candidate for passing and
   returning in VFP registers, ignoring the limit on the total number
   of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
   classification of the first valid component found; if it is not
   VFP_CPRC_UNKNOWN, all components must have the same classification
   as *BASE_TYPE.  If it is found that T contains a type not permitted
   for passing and returning in VFP registers, a type differently
   classified from *BASE_TYPE, or two types differently classified
   from each other, return -1, otherwise return the total number of
   base-type elements found (possibly 0 in an empty structure or
   array).  Vector types are not currently supported, matching the
   generic AAPCS support.  */

static int
arm_vfp_cprc_sub_candidate (struct type *t,
			    enum arm_vfp_cprc_base_type *base_type)
{
  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    case TYPE_CODE_FLT:
      /* A scalar float classifies by its size and must agree with any
	 classification already established.  */
      switch (TYPE_LENGTH (t))
	{
	case 4:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 1;

	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 1;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_COMPLEX:
      /* Arguments of complex T where T is one of the types float or
	 double get treated as if they are implemented as:

	 struct complexT
	 {
	   T real;
	   T imag;
	 };

      */
      switch (TYPE_LENGTH (t))
	{
	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 2;

	case 16:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 2;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_ARRAY:
      {
	int count;
	unsigned unitlen;
	/* Classify the element type, then derive the element count
	   from the array's total size.  */
	count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
	if (count == -1)
	  return -1;
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
	return TYPE_LENGTH (t) / unitlen;
      }
      break;

    case TYPE_CODE_STRUCT:
      {
	int count = 0;
	unsigned unitlen;
	int i;
	/* A struct's element count is the sum over its fields, all of
	   which must classify compatibly.  */
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count += sub_count;
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  /* A size mismatch means padding is present, so the layout is
	     not a pure sequence of base-type elements.  */
	  return -1;
	return count;
      }

    case TYPE_CODE_UNION:
      {
	int count = 0;
	unsigned unitlen;
	int i;
	/* A union's element count is the maximum over its members,
	   since they overlap.  */
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count = (count > sub_count ? count : sub_count);
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    default:
      break;
    }

  return -1;
}
3717
3718 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3719 if passed to or returned from a non-variadic function with the VFP
3720 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3721 *BASE_TYPE to the base type for T and *COUNT to the number of
3722 elements of that base type before returning. */
3723
3724 static int
3725 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3726 int *count)
3727 {
3728 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3729 int c = arm_vfp_cprc_sub_candidate (t, &b);
3730 if (c <= 0 || c > 4)
3731 return 0;
3732 *base_type = b;
3733 *count = c;
3734 return 1;
3735 }
3736
3737 /* Return 1 if the VFP ABI should be used for passing arguments to and
3738 returning values from a function of type FUNC_TYPE, 0
3739 otherwise. */
3740
3741 static int
3742 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3743 {
3744 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3745 /* Variadic functions always use the base ABI. Assume that functions
3746 without debug info are not variadic. */
3747 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3748 return 0;
3749 /* The VFP ABI is only supported as a variant of AAPCS. */
3750 if (tdep->arm_abi != ARM_ABI_AAPCS)
3751 return 0;
3752 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3753 }
3754
/* We currently only support passing parameters in integer registers, which
   conforms with GCC's default model, and VFP argument passing following
   the VFP variant of AAPCS.  Several other variants exist and
   we should probably support some of them based on the selected ABI.

   Writes the return breakpoint address BP_ADDR into LR, copies the
   NARGS values in ARGS into registers and/or onto the stack below SP
   (storing STRUCT_ADDR in the first argument register when
   STRUCT_RETURN is set), writes the final stack pointer back into the
   SP register, and returns it.  */

static CORE_ADDR
arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
		     struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
		     struct value **args, CORE_ADDR sp, int struct_return,
		     CORE_ADDR struct_addr)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  int argnum;
  int argreg;
  int nstack;
  struct stack_item *si = NULL;
  int use_vfp_abi;
  struct type *ftype;
  /* Bitmask of 32-bit VFP register halves still available; bit N set
     means single-precision register N is free.  */
  unsigned vfp_regs_free = (1 << 16) - 1;

  /* Determine the type of this function and whether the VFP ABI
     applies.  */
  ftype = check_typedef (value_type (function));
  if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
    ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
  use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);

  /* Set the return address.  For the ARM, the return breakpoint is
     always at BP_ADDR.  */
  if (arm_pc_is_thumb (gdbarch, bp_addr))
    bp_addr |= 1;
  regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);

  /* Walk through the list of args and determine how large a temporary
     stack is required.  Need to take care here as structs may be
     passed on the stack, and we have to push them.  */
  nstack = 0;

  argreg = ARM_A1_REGNUM;
  /* NOTE: nstack was already zeroed above; this duplicate assignment
     is harmless.  */
  nstack = 0;

  /* The struct_return pointer occupies the first parameter
     passing register.  */
  if (struct_return)
    {
      if (arm_debug)
	fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
			    gdbarch_register_name (gdbarch, argreg),
			    paddress (gdbarch, struct_addr));
      regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
      argreg++;
    }

  for (argnum = 0; argnum < nargs; argnum++)
    {
      int len;
      struct type *arg_type;
      struct type *target_type;
      enum type_code typecode;
      const bfd_byte *val;
      int align;
      enum arm_vfp_cprc_base_type vfp_base_type;
      int vfp_base_count;
      int may_use_core_reg = 1;

      arg_type = check_typedef (value_type (args[argnum]));
      len = TYPE_LENGTH (arg_type);
      target_type = TYPE_TARGET_TYPE (arg_type);
      typecode = TYPE_CODE (arg_type);
      val = value_contents (args[argnum]);

      align = arm_type_align (arg_type);
      /* Round alignment up to a whole number of words.  */
      align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
      /* Different ABIs have different maximum alignments.  */
      if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
	{
	  /* The APCS ABI only requires word alignment.  */
	  align = INT_REGISTER_SIZE;
	}
      else
	{
	  /* The AAPCS requires at most doubleword alignment.  */
	  if (align > INT_REGISTER_SIZE * 2)
	    align = INT_REGISTER_SIZE * 2;
	}

      if (use_vfp_abi
	  && arm_vfp_call_candidate (arg_type, &vfp_base_type,
				     &vfp_base_count))
	{
	  int regno;
	  int unit_length;
	  int shift;
	  unsigned mask;

	  /* Because this is a CPRC it cannot go in a core register or
	     cause a core register to be skipped for alignment.
	     Either it goes in VFP registers and the rest of this loop
	     iteration is skipped for this argument, or it goes on the
	     stack (and the stack alignment code is correct for this
	     case).  */
	  may_use_core_reg = 0;

	  /* Find a contiguous run of free VFP registers large enough
	     to hold all VFP_BASE_COUNT elements, aligned to the base
	     type's size.  */
	  unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
	  shift = unit_length / 4;
	  mask = (1 << (shift * vfp_base_count)) - 1;
	  for (regno = 0; regno < 16; regno += shift)
	    if (((vfp_regs_free >> regno) & mask) == mask)
	      break;

	  if (regno < 16)
	    {
	      int reg_char;
	      int reg_scaled;
	      int i;

	      vfp_regs_free &= ~(mask << regno);
	      reg_scaled = regno / shift;
	      reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
	      for (i = 0; i < vfp_base_count; i++)
		{
		  char name_buf[4];
		  int regnum;
		  if (reg_char == 'q')
		    arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
					 val + i * unit_length);
		  else
		    {
		      /* Look the register up by its user-visible name,
			 e.g. "s3" or "d7".  */
		      xsnprintf (name_buf, sizeof (name_buf), "%c%d",
				 reg_char, reg_scaled + i);
		      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
							    strlen (name_buf));
		      regcache_cooked_write (regcache, regnum,
					     val + i * unit_length);
		    }
		}
	      continue;
	    }
	  else
	    {
	      /* This CPRC could not go in VFP registers, so all VFP
		 registers are now marked as used.  */
	      vfp_regs_free = 0;
	    }
	}

      /* Push stack padding for doubleword alignment.  The contents of
	 the pad word (copied from VAL) are irrelevant; only the size
	 matters.  */
      if (nstack & (align - 1))
	{
	  si = push_stack_item (si, val, INT_REGISTER_SIZE);
	  nstack += INT_REGISTER_SIZE;
	}

      /* Doubleword aligned quantities must go in even register pairs.  */
      if (may_use_core_reg
	  && argreg <= ARM_LAST_ARG_REGNUM
	  && align > INT_REGISTER_SIZE
	  && argreg & 1)
	argreg++;

      /* If the argument is a pointer to a function, and it is a
	 Thumb function, create a LOCAL copy of the value and set
	 the THUMB bit in it.  */
      if (TYPE_CODE_PTR == typecode
	  && target_type != NULL
	  && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
	{
	  CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
	  if (arm_pc_is_thumb (gdbarch, regval))
	    {
	      bfd_byte *copy = alloca (len);
	      store_unsigned_integer (copy, len, byte_order,
				      MAKE_THUMB_ADDR (regval));
	      val = copy;
	    }
	}

      /* Copy the argument to general registers or the stack in
	 register-sized pieces.  Large arguments are split between
	 registers and stack.  */
      while (len > 0)
	{
	  int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;

	  if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
	    {
	      /* The argument is being passed in a general purpose
		 register.  */
	      CORE_ADDR regval
		= extract_unsigned_integer (val, partial_len, byte_order);
	      if (byte_order == BFD_ENDIAN_BIG)
		regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
	      if (arm_debug)
		fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
				    argnum,
				    gdbarch_register_name
				      (gdbarch, argreg),
				    phex (regval, INT_REGISTER_SIZE));
	      regcache_cooked_write_unsigned (regcache, argreg, regval);
	      argreg++;
	    }
	  else
	    {
	      /* Push the arguments onto the stack.  */
	      if (arm_debug)
		fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
				    argnum, nstack);
	      si = push_stack_item (si, val, INT_REGISTER_SIZE);
	      nstack += INT_REGISTER_SIZE;
	    }

	  len -= partial_len;
	  val += partial_len;
	}
    }
  /* If we have an odd number of words to push, then decrement the stack
     by one word now, so first stack argument will be dword aligned.  */
  if (nstack & 4)
    sp -= 4;

  /* Pop the items off the FILO, writing them below SP in the order the
     caller expects (first argument lowest).  */
  while (si)
    {
      sp -= si->len;
      write_memory (sp, si->data, si->len);
      si = pop_stack_item (si);
    }

  /* Finally, update the SP register.  */
  regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);

  return sp;
}
3988
3989
3990 /* Always align the frame to an 8-byte boundary. This is required on
3991 some platforms and harmless on the rest. */
3992
3993 static CORE_ADDR
3994 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3995 {
3996 /* Align the stack to eight bytes. */
3997 return sp & ~ (CORE_ADDR) 7;
3998 }
3999
/* Print the FPA status-flag names corresponding to the low five bits
   of FLAGS, followed by a newline.  */

static void
print_fpu_flags (struct ui_file *file, int flags)
{
  static const char *const flag_names[] =
    { "IVO ", "DVZ ", "OFL ", "UFL ", "INX " };
  int i;

  for (i = 0; i < 5; i++)
    if (flags & (1 << i))
      fputs_filtered (flag_names[i], file);
  fputc_filtered ('\n', file);
}
4015
4016 /* Print interesting information about the floating point processor
4017 (if present) or emulator. */
4018 static void
4019 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4020 struct frame_info *frame, const char *args)
4021 {
4022 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4023 int type;
4024
4025 type = (status >> 24) & 127;
4026 if (status & (1 << 31))
4027 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
4028 else
4029 fprintf_filtered (file, _("Software FPU type %d\n"), type);
4030 /* i18n: [floating point unit] mask */
4031 fputs_filtered (_("mask: "), file);
4032 print_fpu_flags (file, status >> 16);
4033 /* i18n: [floating point unit] flags */
4034 fputs_filtered (_("flags: "), file);
4035 print_fpu_flags (file, status);
4036 }
4037
4038 /* Construct the ARM extended floating point type. */
4039 static struct type *
4040 arm_ext_type (struct gdbarch *gdbarch)
4041 {
4042 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4043
4044 if (!tdep->arm_ext_type)
4045 tdep->arm_ext_type
4046 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4047 floatformats_arm_ext);
4048
4049 return tdep->arm_ext_type;
4050 }
4051
4052 static struct type *
4053 arm_neon_double_type (struct gdbarch *gdbarch)
4054 {
4055 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4056
4057 if (tdep->neon_double_type == NULL)
4058 {
4059 struct type *t, *elem;
4060
4061 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4062 TYPE_CODE_UNION);
4063 elem = builtin_type (gdbarch)->builtin_uint8;
4064 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4065 elem = builtin_type (gdbarch)->builtin_uint16;
4066 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4067 elem = builtin_type (gdbarch)->builtin_uint32;
4068 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4069 elem = builtin_type (gdbarch)->builtin_uint64;
4070 append_composite_type_field (t, "u64", elem);
4071 elem = builtin_type (gdbarch)->builtin_float;
4072 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4073 elem = builtin_type (gdbarch)->builtin_double;
4074 append_composite_type_field (t, "f64", elem);
4075
4076 TYPE_VECTOR (t) = 1;
4077 TYPE_NAME (t) = "neon_d";
4078 tdep->neon_double_type = t;
4079 }
4080
4081 return tdep->neon_double_type;
4082 }
4083
4084 /* FIXME: The vector types are not correctly ordered on big-endian
4085 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4086 bits of d0 - regardless of what unit size is being held in d0. So
4087 the offset of the first uint8 in d0 is 7, but the offset of the
4088 first float is 4. This code works as-is for little-endian
4089 targets. */
4090
4091 static struct type *
4092 arm_neon_quad_type (struct gdbarch *gdbarch)
4093 {
4094 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4095
4096 if (tdep->neon_quad_type == NULL)
4097 {
4098 struct type *t, *elem;
4099
4100 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4101 TYPE_CODE_UNION);
4102 elem = builtin_type (gdbarch)->builtin_uint8;
4103 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4104 elem = builtin_type (gdbarch)->builtin_uint16;
4105 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4106 elem = builtin_type (gdbarch)->builtin_uint32;
4107 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4108 elem = builtin_type (gdbarch)->builtin_uint64;
4109 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4110 elem = builtin_type (gdbarch)->builtin_float;
4111 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4112 elem = builtin_type (gdbarch)->builtin_double;
4113 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4114
4115 TYPE_VECTOR (t) = 1;
4116 TYPE_NAME (t) = "neon_q";
4117 tdep->neon_quad_type = t;
4118 }
4119
4120 return tdep->neon_quad_type;
4121 }
4122
/* Return the GDB type object for the "standard" data type of data in
   register N.  Note the order of the checks matters: pseudo-register
   ranges are tested before falling back to the target description or
   the fixed register-number ranges.  */

static struct type *
arm_register_type (struct gdbarch *gdbarch, int regnum)
{
  int num_regs = gdbarch_num_regs (gdbarch);

  /* The first 32 pseudo registers after the raw registers are the
     single-precision VFP views (presumably s0-s31 — they map to
     'float').  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && regnum >= num_regs && regnum < num_regs + 32)
    return builtin_type (gdbarch)->builtin_float;

  /* The next 16 pseudo registers are the quad-word NEON views.  */
  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
    return arm_neon_quad_type (gdbarch);

  /* If the target description has register information, we are only
     in this function so that we can override the types of
     double-precision registers for NEON.  */
  if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
    {
      struct type *t = tdesc_register_type (gdbarch, regnum);

      if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
	  && TYPE_CODE (t) == TYPE_CODE_FLT
	  && gdbarch_tdep (gdbarch)->have_neon)
	return arm_neon_double_type (gdbarch);
      else
	return t;
    }

  if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
    {
      /* Without FPA registers, show the F registers as void.  */
      if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
	return builtin_type (gdbarch)->builtin_void;

      return arm_ext_type (gdbarch);
    }
  else if (regnum == ARM_SP_REGNUM)
    return builtin_type (gdbarch)->builtin_data_ptr;
  else if (regnum == ARM_PC_REGNUM)
    return builtin_type (gdbarch)->builtin_func_ptr;
  else if (regnum >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return builtin_type (gdbarch)->builtin_int0;
  else
    return builtin_type (gdbarch)->builtin_uint32;
}
4172
4173 /* Map a DWARF register REGNUM onto the appropriate GDB register
4174 number. */
4175
4176 static int
4177 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4178 {
4179 /* Core integer regs. */
4180 if (reg >= 0 && reg <= 15)
4181 return reg;
4182
4183 /* Legacy FPA encoding. These were once used in a way which
4184 overlapped with VFP register numbering, so their use is
4185 discouraged, but GDB doesn't support the ARM toolchain
4186 which used them for VFP. */
4187 if (reg >= 16 && reg <= 23)
4188 return ARM_F0_REGNUM + reg - 16;
4189
4190 /* New assignments for the FPA registers. */
4191 if (reg >= 96 && reg <= 103)
4192 return ARM_F0_REGNUM + reg - 96;
4193
4194 /* WMMX register assignments. */
4195 if (reg >= 104 && reg <= 111)
4196 return ARM_WCGR0_REGNUM + reg - 104;
4197
4198 if (reg >= 112 && reg <= 127)
4199 return ARM_WR0_REGNUM + reg - 112;
4200
4201 if (reg >= 192 && reg <= 199)
4202 return ARM_WC0_REGNUM + reg - 192;
4203
4204 /* VFP v2 registers. A double precision value is actually
4205 in d1 rather than s2, but the ABI only defines numbering
4206 for the single precision registers. This will "just work"
4207 in GDB for little endian targets (we'll read eight bytes,
4208 starting in s0 and then progressing to s1), but will be
4209 reversed on big endian targets with VFP. This won't
4210 be a problem for the new Neon quad registers; you're supposed
4211 to use DW_OP_piece for those. */
4212 if (reg >= 64 && reg <= 95)
4213 {
4214 char name_buf[4];
4215
4216 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4217 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4218 strlen (name_buf));
4219 }
4220
4221 /* VFP v3 / Neon registers. This range is also used for VFP v2
4222 registers, except that it now describes d0 instead of s0. */
4223 if (reg >= 256 && reg <= 287)
4224 {
4225 char name_buf[4];
4226
4227 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4228 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4229 strlen (name_buf));
4230 }
4231
4232 return -1;
4233 }
4234
4235 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4236 static int
4237 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4238 {
4239 int reg = regnum;
4240 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4241
4242 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4243 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4244
4245 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4246 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4247
4248 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4249 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4250
4251 if (reg < NUM_GREGS)
4252 return SIM_ARM_R0_REGNUM + reg;
4253 reg -= NUM_GREGS;
4254
4255 if (reg < NUM_FREGS)
4256 return SIM_ARM_FP0_REGNUM + reg;
4257 reg -= NUM_FREGS;
4258
4259 if (reg < NUM_SREGS)
4260 return SIM_ARM_FPS_REGNUM + reg;
4261 reg -= NUM_SREGS;
4262
4263 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4264 }
4265
/* NOTE: cagney/2001-08-20: Both convert_from_extended() and
   convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
   It is thought that this is the floating-point register format on
   little-endian systems.  */
4270
4271 static void
4272 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4273 void *dbl, int endianess)
4274 {
4275 DOUBLEST d;
4276
4277 if (endianess == BFD_ENDIAN_BIG)
4278 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4279 else
4280 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4281 ptr, &d);
4282 floatformat_from_doublest (fmt, &d, dbl);
4283 }
4284
4285 static void
4286 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4287 int endianess)
4288 {
4289 DOUBLEST d;
4290
4291 floatformat_to_doublest (fmt, ptr, &d);
4292 if (endianess == BFD_ENDIAN_BIG)
4293 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4294 else
4295 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4296 &d, dbl);
4297 }
4298
4299 static int
4300 condition_true (unsigned long cond, unsigned long status_reg)
4301 {
4302 if (cond == INST_AL || cond == INST_NV)
4303 return 1;
4304
4305 switch (cond)
4306 {
4307 case INST_EQ:
4308 return ((status_reg & FLAG_Z) != 0);
4309 case INST_NE:
4310 return ((status_reg & FLAG_Z) == 0);
4311 case INST_CS:
4312 return ((status_reg & FLAG_C) != 0);
4313 case INST_CC:
4314 return ((status_reg & FLAG_C) == 0);
4315 case INST_MI:
4316 return ((status_reg & FLAG_N) != 0);
4317 case INST_PL:
4318 return ((status_reg & FLAG_N) == 0);
4319 case INST_VS:
4320 return ((status_reg & FLAG_V) != 0);
4321 case INST_VC:
4322 return ((status_reg & FLAG_V) == 0);
4323 case INST_HI:
4324 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4325 case INST_LS:
4326 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4327 case INST_GE:
4328 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4329 case INST_LT:
4330 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4331 case INST_GT:
4332 return (((status_reg & FLAG_Z) == 0)
4333 && (((status_reg & FLAG_N) == 0)
4334 == ((status_reg & FLAG_V) == 0)));
4335 case INST_LE:
4336 return (((status_reg & FLAG_Z) != 0)
4337 || (((status_reg & FLAG_N) == 0)
4338 != ((status_reg & FLAG_V) == 0)));
4339 }
4340 return 1;
4341 }
4342
/* Decode the shifted-register operand of the ARM instruction INST and
   return its 32-bit value.  Register contents are read from FRAME;
   CARRY is the current carry flag (consumed only by RRX); PC_VAL is
   the address of the instruction, used when the operand or shift
   register is the PC.  STATUS_REG is not consulted in this body.  */

static unsigned long
shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
		 unsigned long pc_val, unsigned long status_reg)
{
  unsigned long res, shift;
  int rm = bits (inst, 0, 3);
  unsigned long shifttype = bits (inst, 5, 6);

  if (bit (inst, 4))
    {
      /* Register-specified shift: only the low byte of Rs is used;
	 the PC reads as the instruction address plus 8.  */
      int rs = bits (inst, 8, 11);
      shift = (rs == 15 ? pc_val + 8
	       : get_frame_register_unsigned (frame, rs)) & 0xFF;
    }
  else
    shift = bits (inst, 7, 11);

  /* When the shift amount comes from a register, the PC as Rm reads
     12 bytes ahead instead of 8.  */
  res = (rm == ARM_PC_REGNUM
	 ? (pc_val + (bit (inst, 4) ? 12 : 8))
	 : get_frame_register_unsigned (frame, rm));

  switch (shifttype)
    {
    case 0:			/* LSL */
      res = shift >= 32 ? 0 : res << shift;
      break;

    case 1:			/* LSR */
      res = shift >= 32 ? 0 : res >> shift;
      break;

    case 2:			/* ASR */
      /* Shifts of 32 or more replicate the sign bit everywhere.  */
      if (shift >= 32)
	shift = 31;
      res = ((res & 0x80000000L)
	     ? ~((~res) >> shift) : res >> shift);
      break;

    case 3:			/* ROR/RRX */
      /* NOTE(review): a register-specified rotate of exactly 32 is
	 folded to 0 here and therefore treated as RRX; confirm this
	 matches the architected ROR-by-32 behavior (value unchanged).  */
      shift &= 31;
      if (shift == 0)
	res = (res >> 1) | (carry ? 0x80000000L : 0);
      else
	res = (res >> shift) | (res << (32 - shift));
      break;
    }

  /* Only the low 32 bits are meaningful on hosts where unsigned long
     is wider.  */
  return res & 0xffffffff;
}
4392
/* Return the number of set bits in VAL, using Kernighan's trick of
   repeatedly clearing the lowest set bit.  */

static int
bitcount (unsigned long val)
{
  int count = 0;

  while (val != 0)
    {
      val &= val - 1;		/* Clear the least significant 1-bit.  */
      count++;
    }

  return count;
}
4403
/* Return the size in bytes (2 or 4) of the complete Thumb instruction
   whose first halfword is INST1.  */

static int
thumb_insn_size (unsigned short inst1)
{
  /* A first halfword matching 0b11101..., 0b11110... or 0b11111...
     introduces a 32-bit Thumb-2 instruction; anything else is a
     16-bit instruction.  */
  int is_32bit = ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0);

  return is_32bit ? 4 : 2;
}
4415
/* Advance the Thumb-2 ITSTATE value past one instruction and return
   the new value; returns 0 once the IT block is finished.  */

static int
thumb_advance_itstate (unsigned int itstate)
{
  /* Bits [7:5] hold the first three bits of the base condition and
     are preserved; the remaining condition/count bits shift left.  */
  unsigned int base_cond = itstate & 0xe0;
  unsigned int mask = (itstate << 1) & 0x1f;

  /* When the low four bits are exhausted the IT block is over.  */
  if ((mask & 0x0f) == 0)
    return 0;

  return base_cond | mask;
}
4429
4430 /* Find the next PC after the current instruction executes. In some
4431 cases we can not statically determine the answer (see the IT state
4432 handling in this function); in that case, a breakpoint may be
4433 inserted in addition to the returned PC, which will be used to set
4434 another breakpoint by our caller. */
4435
static CORE_ADDR
thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long pc_val = ((unsigned long) pc) + 4;	/* PC after prefetch */
  unsigned short inst1;
  CORE_ADDR nextpc = pc + 2;		/* Default is next instruction.  */
  unsigned long offset;
  ULONGEST status, itstate;

  nextpc = MAKE_THUMB_ADDR (nextpc);
  pc_val = MAKE_THUMB_ADDR (pc_val);

  /* Fetch the first (possibly only) halfword of the instruction.  */
  inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);

  /* Thumb-2 conditional execution support.  There are eight bits in
     the CPSR which describe conditional execution state.  Once
     reconstructed (they're in a funny order), the low five bits
     describe the low bit of the condition for each instruction and
     how many instructions remain.  The high three bits describe the
     base condition.  One of the low four bits will be set if an IT
     block is active.  These bits read as zero on earlier
     processors.  */
  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);

  /* If-Then handling.  On GNU/Linux, where this routine is used, we
     use an undefined instruction as a breakpoint.  Unlike BKPT, IT
     can disable execution of the undefined instruction.  So we might
     miss the breakpoint if we set it on a skipped conditional
     instruction.  Because conditional instructions can change the
     flags, affecting the execution of further instructions, we may
     need to set two breakpoints.  */

  if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
    {
      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
	{
	  /* An IT instruction.  Because this instruction does not
	     modify the flags, we can accurately predict the next
	     executed instruction.  */
	  itstate = inst1 & 0x00ff;
	  pc += thumb_insn_size (inst1);

	  /* Skip any instructions in the IT block whose condition is
	     false; the first true-condition instruction (or the end
	     of the block) is the next executed address.  */
	  while (itstate != 0 && ! condition_true (itstate >> 4, status))
	    {
	      inst1 = read_memory_unsigned_integer (pc, 2,
						    byte_order_for_code);
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);
	    }

	  return MAKE_THUMB_ADDR (pc);
	}
      else if (itstate != 0)
	{
	  /* We are in a conditional block.  Check the condition.  */
	  if (! condition_true (itstate >> 4, status))
	    {
	      /* Advance to the next executed instruction.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      while (itstate != 0 && ! condition_true (itstate >> 4, status))
		{
		  inst1 = read_memory_unsigned_integer (pc, 2,
							byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}

	      return MAKE_THUMB_ADDR (pc);
	    }
	  else if ((itstate & 0x0f) == 0x08)
	    {
	      /* This is the last instruction of the conditional
		 block, and it is executed.  We can handle it normally
		 because the following instruction is not conditional,
		 and we must handle it normally because it is
		 permitted to branch.  Fall through.  */
	    }
	  else
	    {
	      int cond_negated;

	      /* There are conditional instructions after this one.
		 If this instruction modifies the flags, then we can
		 not predict what the next executed instruction will
		 be.  Fortunately, this instruction is architecturally
		 forbidden to branch; we know it will fall through.
		 Start by skipping past it.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      /* Set a breakpoint on the following instruction.  */
	      gdb_assert ((itstate & 0x0f) != 0);
	      arm_insert_single_step_breakpoint (gdbarch, aspace,
						 MAKE_THUMB_ADDR (pc));
	      cond_negated = (itstate >> 4) & 1;

	      /* Skip all following instructions with the same
		 condition.  If there is a later instruction in the IT
		 block with the opposite condition, set the other
		 breakpoint there.  If not, then set a breakpoint on
		 the instruction after the IT block.  */
	      do
		{
		  inst1 = read_memory_unsigned_integer (pc, 2,
							byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}
	      while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);

	      return MAKE_THUMB_ADDR (pc);
	    }
	}
    }
  else if (itstate & 0x0f)
    {
      /* We are in a conditional block.  Check the condition.  */
      int cond = itstate >> 4;

      if (! condition_true (cond, status))
	/* Advance to the next instruction.  All the 32-bit
	   instructions share a common prefix.  */
	return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));

      /* Otherwise, handle the instruction normally.  */
    }

  /* Decode the instruction itself, looking for the forms that can
     change the PC.  */
  if ((inst1 & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
    {
      CORE_ADDR sp;

      /* Fetch the saved PC from the stack.  It's stored above
	 all of the other registers.  */
      offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
      sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
      nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
    }
  else if ((inst1 & 0xf000) == 0xd000)	/* conditional branch */
    {
      unsigned long cond = bits (inst1, 8, 11);
      if (cond == 0x0f)  /* 0x0f = SWI */
	{
	  struct gdbarch_tdep *tdep;
	  tdep = gdbarch_tdep (gdbarch);

	  /* Let the OS-specific code predict the PC after a system
	     call, when it knows how.  */
	  if (tdep->syscall_next_pc != NULL)
	    nextpc = tdep->syscall_next_pc (frame);

	}
      else if (cond != 0x0f && condition_true (cond, status))
	nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
    }
  else if ((inst1 & 0xf800) == 0xe000)	/* unconditional branch */
    {
      nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
    }
  else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
    {
      unsigned short inst2;
      inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

      /* Default to the next instruction.  */
      nextpc = pc + 4;
      nextpc = MAKE_THUMB_ADDR (nextpc);

      if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
	{
	  /* Branches and miscellaneous control instructions.  */

	  if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	    {
	      /* B, BL, BLX.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (inst1, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = pc_val + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;
	    }
	  else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	    {
	      /* SUBS PC, LR, #imm8.  */
	      nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
	      nextpc -= inst2 & 0x00ff;
	    }
	  else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	    {
	      /* Conditional branch.  */
	      if (condition_true (bits (inst1, 6, 9), status))
		{
		  int sign, j1, j2, imm1, imm2;

		  sign = sbits (inst1, 10, 10);
		  imm1 = bits (inst1, 0, 5);
		  imm2 = bits (inst2, 0, 10);
		  j1 = bit (inst2, 13);
		  j2 = bit (inst2, 11);

		  offset = (sign << 20) + (j2 << 19) + (j1 << 18);
		  offset += (imm1 << 12) + (imm2 << 1);

		  nextpc = pc_val + offset;
		}
	    }
	}
      else if ((inst1 & 0xfe50) == 0xe810)
	{
	  /* Load multiple or RFE.  */
	  int rn, offset, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  if (bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* LDMIA or POP */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = bitcount (inst2) * 4 - 4;
	    }
	  else if (!bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* LDMDB */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = -4;
	    }
	  else if (bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* RFEIA */
	      offset = 0;
	    }
	  else if (!bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* RFEDB */
	      offset = -8;
	    }
	  else
	    load_pc = 0;

	  if (load_pc)
	    {
	      CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
	      nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
	    }
	}
      else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
	{
	  /* MOV PC or MOVS PC.  */
	  nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	}
      else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
	{
	  /* LDR PC.  */
	  CORE_ADDR base;
	  int rn, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  base = get_frame_register_unsigned (frame, rn);
	  if (rn == ARM_PC_REGNUM)
	    {
	      /* PC-relative load: the base is the aligned PC plus or
		 minus a 12-bit immediate.  */
	      base = (base + 4) & ~(CORE_ADDR) 0x3;
	      if (bit (inst1, 7))
		base += bits (inst2, 0, 11);
	      else
		base -= bits (inst2, 0, 11);
	    }
	  else if (bit (inst1, 7))
	    base += bits (inst2, 0, 11);
	  else if (bit (inst2, 11))
	    {
	      if (bit (inst2, 10))
		{
		  if (bit (inst2, 9))
		    base += bits (inst2, 0, 7);
		  else
		    base -= bits (inst2, 0, 7);
		}
	    }
	  else if ((inst2 & 0x0fc0) == 0x0000)
	    {
	      int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
	      base += get_frame_register_unsigned (frame, rm) << shift;
	    }
	  else
	    /* Reserved.  */
	    load_pc = 0;

	  if (load_pc)
	    nextpc = get_frame_memory_unsigned (frame, base, 4);
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
	{
	  /* TBB.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = get_frame_register_unsigned (frame, tbl_reg);

	  offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
	  nextpc = pc_val + length;
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
	{
	  /* TBH.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = get_frame_register_unsigned (frame, tbl_reg);

	  offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
	  nextpc = pc_val + length;
	}
    }
  else if ((inst1 & 0xff00) == 0x4700)	/* bx REG, blx REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = UNMAKE_THUMB_ADDR (pc_val);
      else
	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
    }
  else if ((inst1 & 0xff87) == 0x4687)	/* mov pc, REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = pc_val;
      else
	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));

      nextpc = MAKE_THUMB_ADDR (nextpc);
    }
  else if ((inst1 & 0xf500) == 0xb100)
    {
      /* CBNZ or CBZ.  */
      int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
      ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));

      if (bit (inst1, 11) && reg != 0)
	nextpc = pc_val + imm;
      else if (!bit (inst1, 11) && reg == 0)
	nextpc = pc_val + imm;
    }
  /* NOTE(review): some paths above (e.g. pop {rlist, pc}, bx/blx REG,
     LDR PC) return a value loaded from memory or a register without
     applying MAKE_THUMB_ADDR; there the loaded value's own low bit
     conveys the execution state.  */
  return nextpc;
}
4801
4802 /* Get the raw next address. PC is the current program counter, in
4803 FRAME, which is assumed to be executing in ARM mode.
4804
4805 The value returned has the execution state of the next instruction
4806 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4807 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4808 address. */
4809
static CORE_ADDR
arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long pc_val;
  unsigned long this_instr;
  unsigned long status;
  CORE_ADDR nextpc;

  pc_val = (unsigned long) pc;
  this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);

  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  nextpc = (CORE_ADDR) (pc_val + 4);	/* Default case */

  /* An NV (0b1111) condition field selects the unconditional
     instruction space; only BLX <imm> and coprocessor transfers can
     affect the PC there.  */
  if (bits (this_instr, 28, 31) == INST_NV)
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	{
	  /* Branch with Link and change to Thumb.  */
	  nextpc = BranchDest (pc, this_instr);
	  /* The H bit (bit 24) supplies bit 1 of the Thumb target.  */
	  nextpc |= bit (this_instr, 24) << 1;
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	  break;
	}
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	break;
      }
  else if (condition_true (bits (this_instr, 28, 31), status))
    {
      /* Dispatch on bits 24-27 of the instruction.  */
      switch (bits (this_instr, 24, 27))
	{
	case 0x0:
	case 0x1:		/* data processing */
	case 0x2:
	case 0x3:
	  {
	    unsigned long operand1, operand2, result = 0;
	    unsigned long rn;
	    int c;

	    /* Only instructions whose destination (bits 12-15) is the
	       PC can change the flow of control here.  */
	    if (bits (this_instr, 12, 15) != 15)
	      break;

	    if (bits (this_instr, 22, 25) == 0
		&& bits (this_instr, 4, 7) == 9)	/* multiply */
	      error (_("Invalid update to pc in instruction"));

	    /* BX <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      {
		rn = bits (this_instr, 0, 3);
		nextpc = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : get_frame_register_unsigned (frame, rn));

		return nextpc;
	      }

	    /* Multiply into PC.  */
	    c = (status & FLAG_C) ? 1 : 0;
	    rn = bits (this_instr, 16, 19);
	    operand1 = ((rn == ARM_PC_REGNUM)
			? (pc_val + 8)
			: get_frame_register_unsigned (frame, rn));

	    if (bit (this_instr, 25))
	      {
		/* Immediate operand: 8-bit value rotated right by
		   twice the 4-bit rotate field.  */
		unsigned long immval = bits (this_instr, 0, 7);
		unsigned long rotate = 2 * bits (this_instr, 8, 11);
		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
		  & 0xffffffff;
	      }
	    else		/* operand 2 is a shifted register.  */
	      operand2 = shifted_reg_val (frame, this_instr, c,
					  pc_val, status);

	    /* Emulate the data-processing opcode to find the value
	       written to the PC.  */
	    switch (bits (this_instr, 21, 24))
	      {
	      case 0x0:	/*and */
		result = operand1 & operand2;
		break;

	      case 0x1:	/*eor */
		result = operand1 ^ operand2;
		break;

	      case 0x2:	/*sub */
		result = operand1 - operand2;
		break;

	      case 0x3:	/*rsb */
		result = operand2 - operand1;
		break;

	      case 0x4:	/*add */
		result = operand1 + operand2;
		break;

	      case 0x5:	/*adc */
		result = operand1 + operand2 + c;
		break;

	      case 0x6:	/*sbc */
		result = operand1 - operand2 + c;
		break;

	      case 0x7:	/*rsc */
		result = operand2 - operand1 + c;
		break;

	      case 0x8:
	      case 0x9:
	      case 0xa:
	      case 0xb:	/* tst, teq, cmp, cmn */
		/* Comparison opcodes do not write the PC; keep the
		   default fall-through address.  */
		result = (unsigned long) nextpc;
		break;

	      case 0xc:	/*orr */
		result = operand1 | operand2;
		break;

	      case 0xd:	/*mov */
		/* Always step into a function.  */
		result = operand2;
		break;

	      case 0xe:	/*bic */
		result = operand1 & ~operand2;
		break;

	      case 0xf:	/*mvn */
		result = ~operand2;
		break;
	      }

	    /* In 26-bit APCS the bottom two bits of the result are
	       ignored, and we always end up in ARM state.  */
	    if (!arm_apcs_32)
	      nextpc = arm_addr_bits_remove (gdbarch, result);
	    else
	      nextpc = result;

	    break;
	  }

	case 0x4:
	case 0x5:		/* data transfer */
	case 0x6:
	case 0x7:
	  if (bit (this_instr, 20))
	    {
	      /* load */
	      if (bits (this_instr, 12, 15) == 15)
		{
		  /* rd == pc */
		  unsigned long rn;
		  unsigned long base;

		  if (bit (this_instr, 22))
		    error (_("Invalid update to pc in instruction"));

		  /* byte write to PC */
		  rn = bits (this_instr, 16, 19);
		  base = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : get_frame_register_unsigned (frame, rn));

		  if (bit (this_instr, 24))
		    {
		      /* pre-indexed */
		      int c = (status & FLAG_C) ? 1 : 0;
		      unsigned long offset =
			(bit (this_instr, 25)
			 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
			 : bits (this_instr, 0, 11));

		      if (bit (this_instr, 23))
			base += offset;
		      else
			base -= offset;
		    }
		  nextpc =
		    (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
							      4, byte_order);
		}
	    }
	  break;

	case 0x8:
	case 0x9:		/* block transfer */
	  if (bit (this_instr, 20))
	    {
	      /* LDM */
	      if (bit (this_instr, 15))
		{
		  /* loading pc */
		  int offset = 0;
		  unsigned long rn_val
		    = get_frame_register_unsigned (frame,
						   bits (this_instr, 16, 19));

		  if (bit (this_instr, 23))
		    {
		      /* up */
		      unsigned long reglist = bits (this_instr, 0, 14);
		      /* The PC is loaded last, so it sits above all
			 the other transferred registers.  */
		      offset = bitcount (reglist) * 4;
		      if (bit (this_instr, 24))		/* pre */
			offset += 4;
		    }
		  else if (bit (this_instr, 24))
		    offset = -4;

		  nextpc =
		    (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
							      (rn_val + offset),
							      4, byte_order);
		}
	    }
	  break;

	case 0xb:		/* branch & link */
	case 0xa:		/* branch */
	  {
	    nextpc = BranchDest (pc, this_instr);
	    break;
	  }

	case 0xc:
	case 0xd:
	case 0xe:		/* coproc ops */
	  break;
	case 0xf:		/* SWI */
	  {
	    struct gdbarch_tdep *tdep;
	    tdep = gdbarch_tdep (gdbarch);

	    /* Let the OS-specific code predict the PC after a system
	       call, when it knows how.  */
	    if (tdep->syscall_next_pc != NULL)
	      nextpc = tdep->syscall_next_pc (frame);

	  }
	  break;

	default:
	  fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
	  return (pc);
	}
    }

  return nextpc;
}
5071
5072 /* Determine next PC after current instruction executes. Will call either
5073 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5074 loop is detected. */
5075
5076 CORE_ADDR
5077 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5078 {
5079 CORE_ADDR nextpc;
5080
5081 if (arm_frame_is_thumb (frame))
5082 nextpc = thumb_get_next_pc_raw (frame, pc);
5083 else
5084 nextpc = arm_get_next_pc_raw (frame, pc);
5085
5086 return nextpc;
5087 }
5088
5089 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5090 of the appropriate mode (as encoded in the PC value), even if this
5091 differs from what would be expected according to the symbol tables. */
5092
5093 void
5094 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5095 struct address_space *aspace,
5096 CORE_ADDR pc)
5097 {
5098 struct cleanup *old_chain
5099 = make_cleanup_restore_integer (&arm_override_mode);
5100
5101 arm_override_mode = IS_THUMB_ADDR (pc);
5102 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5103
5104 insert_single_step_breakpoint (gdbarch, aspace, pc);
5105
5106 do_cleanups (old_chain);
5107 }
5108
5109 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5110 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5111 is found, attempt to step through it. A breakpoint is placed at the end of
5112 the sequence. */
5113
5114 static int
5115 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
5116 {
5117 struct gdbarch *gdbarch = get_frame_arch (frame);
5118 struct address_space *aspace = get_frame_address_space (frame);
5119 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5120 CORE_ADDR pc = get_frame_pc (frame);
5121 CORE_ADDR breaks[2] = {-1, -1};
5122 CORE_ADDR loc = pc;
5123 unsigned short insn1, insn2;
5124 int insn_count;
5125 int index;
5126 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5127 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5128 ULONGEST status, itstate;
5129
5130 /* We currently do not support atomic sequences within an IT block. */
5131 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
5132 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
5133 if (itstate & 0x0f)
5134 return 0;
5135
5136 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5137 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5138 loc += 2;
5139 if (thumb_insn_size (insn1) != 4)
5140 return 0;
5141
5142 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5143 loc += 2;
5144 if (!((insn1 & 0xfff0) == 0xe850
5145 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
5146 return 0;
5147
5148 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5149 instructions. */
5150 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5151 {
5152 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5153 loc += 2;
5154
5155 if (thumb_insn_size (insn1) != 4)
5156 {
5157 /* Assume that there is at most one conditional branch in the
5158 atomic sequence. If a conditional branch is found, put a
5159 breakpoint in its destination address. */
5160 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
5161 {
5162 if (last_breakpoint > 0)
5163 return 0; /* More than one conditional branch found,
5164 fallback to the standard code. */
5165
5166 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
5167 last_breakpoint++;
5168 }
5169
5170 /* We do not support atomic sequences that use any *other*
5171 instructions but conditional branches to change the PC.
5172 Fall back to standard code to avoid losing control of
5173 execution. */
5174 else if (thumb_instruction_changes_pc (insn1))
5175 return 0;
5176 }
5177 else
5178 {
5179 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5180 loc += 2;
5181
5182 /* Assume that there is at most one conditional branch in the
5183 atomic sequence. If a conditional branch is found, put a
5184 breakpoint in its destination address. */
5185 if ((insn1 & 0xf800) == 0xf000
5186 && (insn2 & 0xd000) == 0x8000
5187 && (insn1 & 0x0380) != 0x0380)
5188 {
5189 int sign, j1, j2, imm1, imm2;
5190 unsigned int offset;
5191
5192 sign = sbits (insn1, 10, 10);
5193 imm1 = bits (insn1, 0, 5);
5194 imm2 = bits (insn2, 0, 10);
5195 j1 = bit (insn2, 13);
5196 j2 = bit (insn2, 11);
5197
5198 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
5199 offset += (imm1 << 12) + (imm2 << 1);
5200
5201 if (last_breakpoint > 0)
5202 return 0; /* More than one conditional branch found,
5203 fallback to the standard code. */
5204
5205 breaks[1] = loc + offset;
5206 last_breakpoint++;
5207 }
5208
5209 /* We do not support atomic sequences that use any *other*
5210 instructions but conditional branches to change the PC.
5211 Fall back to standard code to avoid losing control of
5212 execution. */
5213 else if (thumb2_instruction_changes_pc (insn1, insn2))
5214 return 0;
5215
5216 /* If we find a strex{,b,h,d}, we're done. */
5217 if ((insn1 & 0xfff0) == 0xe840
5218 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
5219 break;
5220 }
5221 }
5222
5223 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5224 if (insn_count == atomic_sequence_length)
5225 return 0;
5226
5227 /* Insert a breakpoint right after the end of the atomic sequence. */
5228 breaks[0] = loc;
5229
5230 /* Check for duplicated breakpoints. Check also for a breakpoint
5231 placed (branch instruction's destination) anywhere in sequence. */
5232 if (last_breakpoint
5233 && (breaks[1] == breaks[0]
5234 || (breaks[1] >= pc && breaks[1] < loc)))
5235 last_breakpoint = 0;
5236
5237 /* Effectively inserts the breakpoints. */
5238 for (index = 0; index <= last_breakpoint; index++)
5239 arm_insert_single_step_breakpoint (gdbarch, aspace,
5240 MAKE_THUMB_ADDR (breaks[index]));
5241
5242 return 1;
5243 }
5244
/* Recognize an ARM-state exclusive load/store (ldrex{,b,h,d} ..
   strex{,b,h,d}) sequence beginning at the PC of FRAME.  If one is found,
   plant single-step breakpoints past the sequence (and at the destination
   of at most one conditional branch inside it) and return 1.  Return 0 to
   make the caller fall back to ordinary single-stepping.  */
static int
arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR pc = get_frame_pc (frame);
  /* breaks[0]: address just past the sequence; breaks[1]: conditional
     branch destination, if any.  */
  CORE_ADDR breaks[2] = {-1, -1};
  CORE_ADDR loc = pc;
  unsigned int insn;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
     Note that we do not currently support conditionally executed atomic
     instructions.  */
  insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
  loc += 4;
  if ((insn & 0xff9000f0) != 0xe1900090)
    return 0;

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
      loc += 4;

      /* Assume that there is at most one conditional branch in the atomic
         sequence.  If a conditional branch is found, put a breakpoint in
         its destination address.  */
      if (bits (insn, 24, 27) == 0xa)
	{
          if (last_breakpoint > 0)
            return 0; /* More than one conditional branch found, fallback
                         to the standard single-step code.  */

	  breaks[1] = BranchDest (loc - 4, insn);
	  last_breakpoint++;
        }

      /* We do not support atomic sequences that use any *other* instructions
         but conditional branches to change the PC.  Fall back to standard
	 code to avoid losing control of execution.  */
      else if (arm_instruction_changes_pc (insn))
	return 0;

      /* If we find a strex{,b,h,d}, we're done.  */
      if ((insn & 0xff9000f0) == 0xe1800090)
	break;
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return 0;

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
	  || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  /* Effectively inserts the breakpoints.  */
  for (index = 0; index <= last_breakpoint; index++)
    arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);

  return 1;
}
5319
/* Step over an exclusive load/store sequence, dispatching on the
   execution state of FRAME: Thumb and ARM sequences are decoded by
   separate helpers.  Returns 1 if breakpoints were planted.  */
int
arm_deal_with_atomic_sequence (struct frame_info *frame)
{
  return (arm_frame_is_thumb (frame)
	  ? thumb_deal_with_atomic_sequence_raw (frame)
	  : arm_deal_with_atomic_sequence_raw (frame));
}
5328
5329 /* single_step() is called just before we want to resume the inferior,
5330 if we want to single-step it but there is no hardware or kernel
5331 single-step support. We find the target of the coming instruction
5332 and breakpoint it. */
5333
5334 int
5335 arm_software_single_step (struct frame_info *frame)
5336 {
5337 struct gdbarch *gdbarch = get_frame_arch (frame);
5338 struct address_space *aspace = get_frame_address_space (frame);
5339 CORE_ADDR next_pc;
5340
5341 if (arm_deal_with_atomic_sequence (frame))
5342 return 1;
5343
5344 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5345 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5346
5347 return 1;
5348 }
5349
5350 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5351 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5352 NULL if an error occurs. BUF is freed. */
5353
5354 static gdb_byte *
5355 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5356 int old_len, int new_len)
5357 {
5358 gdb_byte *new_buf;
5359 int bytes_to_read = new_len - old_len;
5360
5361 new_buf = xmalloc (new_len);
5362 memcpy (new_buf + bytes_to_read, buf, old_len);
5363 xfree (buf);
5364 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5365 {
5366 xfree (new_buf);
5367 return NULL;
5368 }
5369 return new_buf;
5370 }
5371
5372 /* An IT block is at most the 2-byte IT instruction followed by
5373 four 4-byte instructions. The furthest back we must search to
5374 find an IT block that affects the current instruction is thus
5375 2 + 3 * 4 == 14 bytes. */
5376 #define MAX_IT_BLOCK_PREFIX 14
5377
5378 /* Use a quick scan if there are more than this many bytes of
5379 code. */
5380 #define IT_SCAN_THRESHOLD 32
5381
5382 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5383 A breakpoint in an IT block may not be hit, depending on the
5384 condition flags. */
5385 static CORE_ADDR
5386 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5387 {
5388 gdb_byte *buf;
5389 char map_type;
5390 CORE_ADDR boundary, func_start;
5391 int buf_len;
5392 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5393 int i, any, last_it, last_it_count;
5394
5395 /* If we are using BKPT breakpoints, none of this is necessary. */
5396 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5397 return bpaddr;
5398
5399 /* ARM mode does not have this problem. */
5400 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5401 return bpaddr;
5402
5403 /* We are setting a breakpoint in Thumb code that could potentially
5404 contain an IT block. The first step is to find how much Thumb
5405 code there is; we do not need to read outside of known Thumb
5406 sequences. */
5407 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5408 if (map_type == 0)
5409 /* Thumb-2 code must have mapping symbols to have a chance. */
5410 return bpaddr;
5411
5412 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5413
5414 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5415 && func_start > boundary)
5416 boundary = func_start;
5417
5418 /* Search for a candidate IT instruction. We have to do some fancy
5419 footwork to distinguish a real IT instruction from the second
5420 half of a 32-bit instruction, but there is no need for that if
5421 there's no candidate. */
5422 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5423 if (buf_len == 0)
5424 /* No room for an IT instruction. */
5425 return bpaddr;
5426
5427 buf = xmalloc (buf_len);
5428 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5429 return bpaddr;
5430 any = 0;
5431 for (i = 0; i < buf_len; i += 2)
5432 {
5433 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5434 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5435 {
5436 any = 1;
5437 break;
5438 }
5439 }
5440 if (any == 0)
5441 {
5442 xfree (buf);
5443 return bpaddr;
5444 }
5445
5446 /* OK, the code bytes before this instruction contain at least one
5447 halfword which resembles an IT instruction. We know that it's
5448 Thumb code, but there are still two possibilities. Either the
5449 halfword really is an IT instruction, or it is the second half of
5450 a 32-bit Thumb instruction. The only way we can tell is to
5451 scan forwards from a known instruction boundary. */
5452 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5453 {
5454 int definite;
5455
5456 /* There's a lot of code before this instruction. Start with an
5457 optimistic search; it's easy to recognize halfwords that can
5458 not be the start of a 32-bit instruction, and use that to
5459 lock on to the instruction boundaries. */
5460 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5461 if (buf == NULL)
5462 return bpaddr;
5463 buf_len = IT_SCAN_THRESHOLD;
5464
5465 definite = 0;
5466 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5467 {
5468 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5469 if (thumb_insn_size (inst1) == 2)
5470 {
5471 definite = 1;
5472 break;
5473 }
5474 }
5475
5476 /* At this point, if DEFINITE, BUF[I] is the first place we
5477 are sure that we know the instruction boundaries, and it is far
5478 enough from BPADDR that we could not miss an IT instruction
5479 affecting BPADDR. If ! DEFINITE, give up - start from a
5480 known boundary. */
5481 if (! definite)
5482 {
5483 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5484 bpaddr - boundary);
5485 if (buf == NULL)
5486 return bpaddr;
5487 buf_len = bpaddr - boundary;
5488 i = 0;
5489 }
5490 }
5491 else
5492 {
5493 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5494 if (buf == NULL)
5495 return bpaddr;
5496 buf_len = bpaddr - boundary;
5497 i = 0;
5498 }
5499
5500 /* Scan forwards. Find the last IT instruction before BPADDR. */
5501 last_it = -1;
5502 last_it_count = 0;
5503 while (i < buf_len)
5504 {
5505 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5506 last_it_count--;
5507 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5508 {
5509 last_it = i;
5510 if (inst1 & 0x0001)
5511 last_it_count = 4;
5512 else if (inst1 & 0x0002)
5513 last_it_count = 3;
5514 else if (inst1 & 0x0004)
5515 last_it_count = 2;
5516 else
5517 last_it_count = 1;
5518 }
5519 i += thumb_insn_size (inst1);
5520 }
5521
5522 xfree (buf);
5523
5524 if (last_it == -1)
5525 /* There wasn't really an IT instruction after all. */
5526 return bpaddr;
5527
5528 if (last_it_count < 1)
5529 /* It was too far away. */
5530 return bpaddr;
5531
5532 /* This really is a trouble spot. Move the breakpoint to the IT
5533 instruction. */
5534 return bpaddr - buf_len + last_it;
5535 }
5536
5537 /* ARM displaced stepping support.
5538
5539 Generally ARM displaced stepping works as follows:
5540
5541 1. When an instruction is to be single-stepped, it is first decoded by
5542 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5543 Depending on the type of instruction, it is then copied to a scratch
5544 location, possibly in a modified form. The copy_* set of functions
5545 performs such modification, as necessary. A breakpoint is placed after
5546 the modified instruction in the scratch space to return control to GDB.
5547 Note in particular that instructions which modify the PC will no longer
5548 do so after modification.
5549
5550 2. The instruction is single-stepped, by setting the PC to the scratch
5551 location address, and resuming. Control returns to GDB when the
5552 breakpoint is hit.
5553
5554 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5555 function used for the current instruction. This function's job is to
5556 put the CPU/memory state back to what it would have been if the
5557 instruction had been executed unmodified in its original location. */
5558
5559 /* NOP instruction (mov r0, r0). */
5560 #define ARM_NOP 0xe1a00000
5561 #define THUMB_NOP 0x4600
5562
5563 /* Helper for register reads for displaced stepping. In particular, this
5564 returns the PC as it would be seen by the instruction at its original
5565 location. */
5566
5567 ULONGEST
5568 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5569 int regno)
5570 {
5571 ULONGEST ret;
5572 CORE_ADDR from = dsc->insn_addr;
5573
5574 if (regno == ARM_PC_REGNUM)
5575 {
5576 /* Compute pipeline offset:
5577 - When executing an ARM instruction, PC reads as the address of the
5578 current instruction plus 8.
5579 - When executing a Thumb instruction, PC reads as the address of the
5580 current instruction plus 4. */
5581
5582 if (!dsc->is_thumb)
5583 from += 8;
5584 else
5585 from += 4;
5586
5587 if (debug_displaced)
5588 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5589 (unsigned long) from);
5590 return (ULONGEST) from;
5591 }
5592 else
5593 {
5594 regcache_cooked_read_unsigned (regs, regno, &ret);
5595 if (debug_displaced)
5596 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5597 regno, (unsigned long) ret);
5598 return ret;
5599 }
5600 }
5601
5602 static int
5603 displaced_in_arm_mode (struct regcache *regs)
5604 {
5605 ULONGEST ps;
5606 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5607
5608 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5609
5610 return (ps & t_bit) == 0;
5611 }
5612
5613 /* Write to the PC as from a branch instruction. */
5614
5615 static void
5616 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5617 ULONGEST val)
5618 {
5619 if (!dsc->is_thumb)
5620 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5621 architecture versions < 6. */
5622 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5623 val & ~(ULONGEST) 0x3);
5624 else
5625 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5626 val & ~(ULONGEST) 0x1);
5627 }
5628
5629 /* Write to the PC as from a branch-exchange instruction. */
5630
5631 static void
5632 bx_write_pc (struct regcache *regs, ULONGEST val)
5633 {
5634 ULONGEST ps;
5635 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5636
5637 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5638
5639 if ((val & 1) == 1)
5640 {
5641 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5642 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5643 }
5644 else if ((val & 2) == 0)
5645 {
5646 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5647 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5648 }
5649 else
5650 {
5651 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5652 mode, align dest to 4 bytes). */
5653 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5654 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5655 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5656 }
5657 }
5658
5659 /* Write to the PC as if from a load instruction. */
5660
5661 static void
5662 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5663 ULONGEST val)
5664 {
5665 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5666 bx_write_pc (regs, val);
5667 else
5668 branch_write_pc (regs, dsc, val);
5669 }
5670
5671 /* Write to the PC as if from an ALU instruction. */
5672
5673 static void
5674 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5675 ULONGEST val)
5676 {
5677 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5678 bx_write_pc (regs, val);
5679 else
5680 branch_write_pc (regs, dsc, val);
5681 }
5682
5683 /* Helper for writing to registers for displaced stepping. Writing to the PC
5684 has a varying effects depending on the instruction which does the write:
5685 this is controlled by the WRITE_PC argument. */
5686
5687 void
5688 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5689 int regno, ULONGEST val, enum pc_write_style write_pc)
5690 {
5691 if (regno == ARM_PC_REGNUM)
5692 {
5693 if (debug_displaced)
5694 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5695 (unsigned long) val);
5696 switch (write_pc)
5697 {
5698 case BRANCH_WRITE_PC:
5699 branch_write_pc (regs, dsc, val);
5700 break;
5701
5702 case BX_WRITE_PC:
5703 bx_write_pc (regs, val);
5704 break;
5705
5706 case LOAD_WRITE_PC:
5707 load_write_pc (regs, dsc, val);
5708 break;
5709
5710 case ALU_WRITE_PC:
5711 alu_write_pc (regs, dsc, val);
5712 break;
5713
5714 case CANNOT_WRITE_PC:
5715 warning (_("Instruction wrote to PC in an unexpected way when "
5716 "single-stepping"));
5717 break;
5718
5719 default:
5720 internal_error (__FILE__, __LINE__,
5721 _("Invalid argument to displaced_write_reg"));
5722 }
5723
5724 dsc->wrote_to_pc = 1;
5725 }
5726 else
5727 {
5728 if (debug_displaced)
5729 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5730 regno, (unsigned long) val);
5731 regcache_cooked_write_unsigned (regs, regno, val);
5732 }
5733 }
5734
5735 /* This function is used to concisely determine if an instruction INSN
5736 references PC. Register fields of interest in INSN should have the
5737 corresponding fields of BITMASK set to 0b1111. The function
5738 returns return 1 if any of these fields in INSN reference the PC
5739 (also 0b1111, r15), else it returns 0. */
5740
static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  /* Walk BITMASK one register field at a time.  Each field of interest
     is marked with 0b1111; a field of INSN references the PC when it
     also reads 0b1111 (r15).  */
  while (bitmask != 0)
    {
      /* Isolate the lowest set bit: the field starts there.  */
      uint32_t low = bitmask & -bitmask;
      uint32_t field = low * 0xf;

      if ((insn & field) == field)
	return 1;

      bitmask &= ~field;
    }

  return 0;
}
5766
5767 /* The simplest copy function. Many instructions have the same effect no
5768 matter what address they are executed at: in those cases, use this. */
5769
5770 static int
5771 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5772 const char *iname, struct displaced_step_closure *dsc)
5773 {
5774 if (debug_displaced)
5775 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5776 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5777 iname);
5778
5779 dsc->modinsn[0] = insn;
5780
5781 return 0;
5782 }
5783
5784 static int
5785 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5786 uint16_t insn2, const char *iname,
5787 struct displaced_step_closure *dsc)
5788 {
5789 if (debug_displaced)
5790 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5791 "opcode/class '%s' unmodified\n", insn1, insn2,
5792 iname);
5793
5794 dsc->modinsn[0] = insn1;
5795 dsc->modinsn[1] = insn2;
5796 dsc->numinsns = 2;
5797
5798 return 0;
5799 }
5800
5801 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5802 modification. */
5803 static int
5804 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5805 const char *iname,
5806 struct displaced_step_closure *dsc)
5807 {
5808 if (debug_displaced)
5809 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5810 "opcode/class '%s' unmodified\n", insn,
5811 iname);
5812
5813 dsc->modinsn[0] = insn;
5814
5815 return 0;
5816 }
5817
5818 /* Preload instructions with immediate offset. */
5819
5820 static void
5821 cleanup_preload (struct gdbarch *gdbarch,
5822 struct regcache *regs, struct displaced_step_closure *dsc)
5823 {
5824 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5825 if (!dsc->u.preload.immed)
5826 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5827 }
5828
5829 static void
5830 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5831 struct displaced_step_closure *dsc, unsigned int rn)
5832 {
5833 ULONGEST rn_val;
5834 /* Preload instructions:
5835
5836 {pli/pld} [rn, #+/-imm]
5837 ->
5838 {pli/pld} [r0, #+/-imm]. */
5839
5840 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5841 rn_val = displaced_read_reg (regs, dsc, rn);
5842 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5843 dsc->u.preload.immed = 1;
5844
5845 dsc->cleanup = &cleanup_preload;
5846 }
5847
5848 static int
5849 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5850 struct displaced_step_closure *dsc)
5851 {
5852 unsigned int rn = bits (insn, 16, 19);
5853
5854 if (!insn_references_pc (insn, 0x000f0000ul))
5855 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5856
5857 if (debug_displaced)
5858 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5859 (unsigned long) insn);
5860
5861 dsc->modinsn[0] = insn & 0xfff0ffff;
5862
5863 install_preload (gdbarch, regs, dsc, rn);
5864
5865 return 0;
5866 }
5867
5868 static int
5869 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5870 struct regcache *regs, struct displaced_step_closure *dsc)
5871 {
5872 unsigned int rn = bits (insn1, 0, 3);
5873 unsigned int u_bit = bit (insn1, 7);
5874 int imm12 = bits (insn2, 0, 11);
5875 ULONGEST pc_val;
5876
5877 if (rn != ARM_PC_REGNUM)
5878 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5879
5880 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5881 PLD (literal) Encoding T1. */
5882 if (debug_displaced)
5883 fprintf_unfiltered (gdb_stdlog,
5884 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5885 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5886 imm12);
5887
5888 if (!u_bit)
5889 imm12 = -1 * imm12;
5890
5891 /* Rewrite instruction {pli/pld} PC imm12 into:
5892 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5893
5894 {pli/pld} [r0, r1]
5895
5896 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5897
5898 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5899 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5900
5901 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5902
5903 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5904 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5905 dsc->u.preload.immed = 0;
5906
5907 /* {pli/pld} [r0, r1] */
5908 dsc->modinsn[0] = insn1 & 0xfff0;
5909 dsc->modinsn[1] = 0xf001;
5910 dsc->numinsns = 2;
5911
5912 dsc->cleanup = &cleanup_preload;
5913 return 0;
5914 }
5915
5916 /* Preload instructions with register offset. */
5917
5918 static void
5919 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5920 struct displaced_step_closure *dsc, unsigned int rn,
5921 unsigned int rm)
5922 {
5923 ULONGEST rn_val, rm_val;
5924
5925 /* Preload register-offset instructions:
5926
5927 {pli/pld} [rn, rm {, shift}]
5928 ->
5929 {pli/pld} [r0, r1 {, shift}]. */
5930
5931 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5932 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5933 rn_val = displaced_read_reg (regs, dsc, rn);
5934 rm_val = displaced_read_reg (regs, dsc, rm);
5935 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5936 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5937 dsc->u.preload.immed = 0;
5938
5939 dsc->cleanup = &cleanup_preload;
5940 }
5941
5942 static int
5943 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5944 struct regcache *regs,
5945 struct displaced_step_closure *dsc)
5946 {
5947 unsigned int rn = bits (insn, 16, 19);
5948 unsigned int rm = bits (insn, 0, 3);
5949
5950
5951 if (!insn_references_pc (insn, 0x000f000ful))
5952 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5953
5954 if (debug_displaced)
5955 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5956 (unsigned long) insn);
5957
5958 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5959
5960 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5961 return 0;
5962 }
5963
5964 /* Copy/cleanup coprocessor load and store instructions. */
5965
5966 static void
5967 cleanup_copro_load_store (struct gdbarch *gdbarch,
5968 struct regcache *regs,
5969 struct displaced_step_closure *dsc)
5970 {
5971 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5972
5973 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5974
5975 if (dsc->u.ldst.writeback)
5976 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5977 }
5978
5979 static void
5980 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5981 struct displaced_step_closure *dsc,
5982 int writeback, unsigned int rn)
5983 {
5984 ULONGEST rn_val;
5985
5986 /* Coprocessor load/store instructions:
5987
5988 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5989 ->
5990 {stc/stc2} [r0, #+/-imm].
5991
5992 ldc/ldc2 are handled identically. */
5993
5994 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5995 rn_val = displaced_read_reg (regs, dsc, rn);
5996 /* PC should be 4-byte aligned. */
5997 rn_val = rn_val & 0xfffffffc;
5998 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5999
6000 dsc->u.ldst.writeback = writeback;
6001 dsc->u.ldst.rn = rn;
6002
6003 dsc->cleanup = &cleanup_copro_load_store;
6004 }
6005
6006 static int
6007 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
6008 struct regcache *regs,
6009 struct displaced_step_closure *dsc)
6010 {
6011 unsigned int rn = bits (insn, 16, 19);
6012
6013 if (!insn_references_pc (insn, 0x000f0000ul))
6014 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
6015
6016 if (debug_displaced)
6017 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6018 "load/store insn %.8lx\n", (unsigned long) insn);
6019
6020 dsc->modinsn[0] = insn & 0xfff0ffff;
6021
6022 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
6023
6024 return 0;
6025 }
6026
6027 static int
6028 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
6029 uint16_t insn2, struct regcache *regs,
6030 struct displaced_step_closure *dsc)
6031 {
6032 unsigned int rn = bits (insn1, 0, 3);
6033
6034 if (rn != ARM_PC_REGNUM)
6035 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6036 "copro load/store", dsc);
6037
6038 if (debug_displaced)
6039 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6040 "load/store insn %.4x%.4x\n", insn1, insn2);
6041
6042 dsc->modinsn[0] = insn1 & 0xfff0;
6043 dsc->modinsn[1] = insn2;
6044 dsc->numinsns = 2;
6045
6046 /* This function is called for copying instruction LDC/LDC2/VLDR, which
6047 doesn't support writeback, so pass 0. */
6048 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6049
6050 return 0;
6051 }
6052
6053 /* Clean up branch instructions (actually perform the branch, by setting
6054 PC). */
6055
6056 static void
6057 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
6058 struct displaced_step_closure *dsc)
6059 {
6060 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6061 int branch_taken = condition_true (dsc->u.branch.cond, status);
6062 enum pc_write_style write_pc = dsc->u.branch.exchange
6063 ? BX_WRITE_PC : BRANCH_WRITE_PC;
6064
6065 if (!branch_taken)
6066 return;
6067
6068 if (dsc->u.branch.link)
6069 {
6070 /* The value of LR should be the next insn of current one. In order
6071 not to confuse logic hanlding later insn `bx lr', if current insn mode
6072 is Thumb, the bit 0 of LR value should be set to 1. */
6073 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
6074
6075 if (dsc->is_thumb)
6076 next_insn_addr |= 0x1;
6077
6078 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
6079 CANNOT_WRITE_PC);
6080 }
6081
6082 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
6083 }
6084
6085 /* Copy B/BL/BLX instructions with immediate destinations. */
6086
6087 static void
6088 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6089 struct displaced_step_closure *dsc,
6090 unsigned int cond, int exchange, int link, long offset)
6091 {
6092 /* Implement "BL<cond> <label>" as:
6093
6094 Preparation: cond <- instruction condition
6095 Insn: mov r0, r0 (nop)
6096 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6097
6098 B<cond> similar, but don't set r14 in cleanup. */
6099
6100 dsc->u.branch.cond = cond;
6101 dsc->u.branch.link = link;
6102 dsc->u.branch.exchange = exchange;
6103
6104 dsc->u.branch.dest = dsc->insn_addr;
6105 if (link && exchange)
6106 /* For BLX, offset is computed from the Align (PC, 4). */
6107 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6108
6109 if (dsc->is_thumb)
6110 dsc->u.branch.dest += 4 + offset;
6111 else
6112 dsc->u.branch.dest += 8 + offset;
6113
6114 dsc->cleanup = &cleanup_branch;
6115 }
6116 static int
6117 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6118 struct regcache *regs, struct displaced_step_closure *dsc)
6119 {
6120 unsigned int cond = bits (insn, 28, 31);
6121 int exchange = (cond == 0xf);
6122 int link = exchange || bit (insn, 24);
6123 long offset;
6124
6125 if (debug_displaced)
6126 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6127 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6128 (unsigned long) insn);
6129 if (exchange)
6130 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6131 then arrange the switch into Thumb mode. */
6132 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6133 else
6134 offset = bits (insn, 0, 23) << 2;
6135
6136 if (bit (offset, 25))
6137 offset = offset | ~0x3ffffff;
6138
6139 dsc->modinsn[0] = ARM_NOP;
6140
6141 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6142 return 0;
6143 }
6144
/* Copy 32-bit Thumb B/BL/BLX instructions with immediate destinations.
   The copied instruction is replaced by a NOP; cleanup_branch (installed
   by install_b_bl_blx) performs the actual PC/LR update afterwards.  */
static int
thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
		      uint16_t insn2, struct regcache *regs,
		      struct displaced_step_closure *dsc)
{
  int link = bit (insn2, 14);	/* Set for BL/BLX.  */
  int exchange = link && !bit (insn2, 12);	/* BLX: link, bit 12 clear.  */
  int cond = INST_AL;
  long offset = 0;
  int j1 = bit (insn2, 13);
  int j2 = bit (insn2, 11);
  int s = sbits (insn1, 10, 10);	/* Sign bit, already sign-extended.  */
  /* I1 = NOT (J1 XOR S), I2 = NOT (J2 XOR S), per the Thumb-2 branch
     encodings (bit 10 of INSN1 is the S bit).  */
  int i1 = !(j1 ^ bit (insn1, 10));
  int i2 = !(j2 ^ bit (insn1, 10));

  if (!link && !exchange) /* B */
    {
      offset = (bits (insn2, 0, 10) << 1);
      if (bit (insn2, 12)) /* Encoding T4 */
	{
	  offset |= (bits (insn1, 0, 9) << 12)
	    | (i2 << 22)
	    | (i1 << 23)
	    | (s << 24);
	  cond = INST_AL;
	}
      else /* Encoding T3 */
	{
	  offset |= (bits (insn1, 0, 5) << 12)
	    | (j1 << 18)
	    | (j2 << 19)
	    | (s << 20);
	  cond = bits (insn1, 6, 9);
	}
    }
  else
    {
      /* BL (halfword offset) or BLX (word offset, bit 0 dropped).  */
      offset = (bits (insn1, 0, 9) << 12);
      offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
      offset |= exchange ?
	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
			"%.4x %.4x with offset %.8lx\n",
			link ? (exchange) ? "blx" : "bl" : "b",
			insn1, insn2, offset);

  dsc->modinsn[0] = THUMB_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
6199
/* Copy B Thumb instructions (16-bit encodings T1/T2): execute a NOP in the
   scratch area and record the branch for cleanup_branch to perform.  */
static int
thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
	      struct displaced_step_closure *dsc)
{
  unsigned int cond = 0;
  int offset = 0;
  unsigned short bit_12_15 = bits (insn, 12, 15);
  CORE_ADDR from = dsc->insn_addr;

  if (bit_12_15 == 0xd)		/* Encoding T1: conditional B.  */
    {
      /* offset = SignExtend (imm8:0, 32) */
      offset = sbits ((insn << 1), 0, 8);
      cond = bits (insn, 8, 11);
    }
  else if (bit_12_15 == 0xe) /* Encoding T2 */
    {
      /* offset = SignExtend (imm11:0, 32) */
      offset = sbits ((insn << 1), 0, 11);
      cond = INST_AL;
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying b immediate insn %.4x "
			"with offset %d\n", insn, offset);

  /* In Thumb state the PC reads as the branch address + 4.  */
  dsc->u.branch.cond = cond;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  dsc->u.branch.dest = from + 4 + offset;

  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
6238
6239 /* Copy BX/BLX with register-specified destinations. */
6240
6241 static void
6242 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6243 struct displaced_step_closure *dsc, int link,
6244 unsigned int cond, unsigned int rm)
6245 {
6246 /* Implement {BX,BLX}<cond> <reg>" as:
6247
6248 Preparation: cond <- instruction condition
6249 Insn: mov r0, r0 (nop)
6250 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6251
6252 Don't set r14 in cleanup for BX. */
6253
6254 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6255
6256 dsc->u.branch.cond = cond;
6257 dsc->u.branch.link = link;
6258
6259 dsc->u.branch.exchange = 1;
6260
6261 dsc->cleanup = &cleanup_branch;
6262 }
6263
6264 static int
6265 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6266 struct regcache *regs, struct displaced_step_closure *dsc)
6267 {
6268 unsigned int cond = bits (insn, 28, 31);
6269 /* BX: x12xxx1x
6270 BLX: x12xxx3x. */
6271 int link = bit (insn, 5);
6272 unsigned int rm = bits (insn, 0, 3);
6273
6274 if (debug_displaced)
6275 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6276 (unsigned long) insn);
6277
6278 dsc->modinsn[0] = ARM_NOP;
6279
6280 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6281 return 0;
6282 }
6283
6284 static int
6285 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6286 struct regcache *regs,
6287 struct displaced_step_closure *dsc)
6288 {
6289 int link = bit (insn, 7);
6290 unsigned int rm = bits (insn, 3, 6);
6291
6292 if (debug_displaced)
6293 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6294 (unsigned short) insn);
6295
6296 dsc->modinsn[0] = THUMB_NOP;
6297
6298 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6299
6300 return 0;
6301 }
6302
6303
6304 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6305
6306 static void
6307 cleanup_alu_imm (struct gdbarch *gdbarch,
6308 struct regcache *regs, struct displaced_step_closure *dsc)
6309 {
6310 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6311 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6312 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6313 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6314 }
6315
/* Copy an ARM data-processing instruction with an immediate RHS,
   rewriting PC-relative Rd/Rn operands onto scratch registers r0/r1.  */

static int
arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
		  struct displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rd = bits (insn, 12, 15);
  unsigned int op = bits (insn, 21, 24);
  int is_mov = (op == 0xd);
  ULONGEST rd_val, rn_val;

  /* If neither Rd nor Rn is the PC, the insn runs unmodified.  */
  if (!insn_references_pc (insn, 0x000ff000ul))
    return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
			"%.8lx\n", is_mov ? "move" : "ALU",
			(unsigned long) insn);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Mask 0xfff00fff clears the Rn and Rd fields (both become r0); for
     non-MOV ops, 0x10000 sets Rn to r1.  MOV has no Rn operand.  */
  if (is_mov)
    dsc->modinsn[0] = insn & 0xfff00fff;
  else
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
6363
/* Copy a 32-bit Thumb MOV (immediate) instruction — asserted below via
   op == 0x2 && rn == 0xf — when its Rd field (or the low bits of the
   second halfword) involves the PC, using r0/r1 as scratch.  */

static int
thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
		     uint16_t insn2, struct regcache *regs,
		     struct displaced_step_closure *dsc)
{
  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn, rm, rd;
  ULONGEST rd_val, rn_val;

  rn = bits (insn1, 0, 3);	/* Rn */
  rm = bits (insn2, 0, 3);	/* Rm */
  rd = bits (insn2, 8, 11);	/* Rd */

  /* This routine is only called for instruction MOV.  */
  gdb_assert (op == 0x2 && rn == 0xf);

  if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
			"ALU", insn1, insn2);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Clear the Rd field (-> r0) in the second halfword.  NOTE(review):
     the "| 0x1" rewrites the low bits of insn2 — confirm against the
     Thumb-2 MOV encoding that this selects the intended scratch
     operand.  */
  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
6415
6416 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6417
6418 static void
6419 cleanup_alu_reg (struct gdbarch *gdbarch,
6420 struct regcache *regs, struct displaced_step_closure *dsc)
6421 {
6422 ULONGEST rd_val;
6423 int i;
6424
6425 rd_val = displaced_read_reg (regs, dsc, 0);
6426
6427 for (i = 0; i < 3; i++)
6428 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6429
6430 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6431 }
6432
6433 static void
6434 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6435 struct displaced_step_closure *dsc,
6436 unsigned int rd, unsigned int rn, unsigned int rm)
6437 {
6438 ULONGEST rd_val, rn_val, rm_val;
6439
6440 /* Instruction is of form:
6441
6442 <op><cond> rd, [rn,] rm [, <shift>]
6443
6444 Rewrite as:
6445
6446 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6447 r0, r1, r2 <- rd, rn, rm
6448 Insn: <op><cond> r0, r1, r2 [, <shift>]
6449 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6450 */
6451
6452 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6453 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6454 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6455 rd_val = displaced_read_reg (regs, dsc, rd);
6456 rn_val = displaced_read_reg (regs, dsc, rn);
6457 rm_val = displaced_read_reg (regs, dsc, rm);
6458 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6459 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6460 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6461 dsc->rd = rd;
6462
6463 dsc->cleanup = &cleanup_alu_reg;
6464 }
6465
6466 static int
6467 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6468 struct displaced_step_closure *dsc)
6469 {
6470 unsigned int op = bits (insn, 21, 24);
6471 int is_mov = (op == 0xd);
6472
6473 if (!insn_references_pc (insn, 0x000ff00ful))
6474 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6475
6476 if (debug_displaced)
6477 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6478 is_mov ? "move" : "ALU", (unsigned long) insn);
6479
6480 if (is_mov)
6481 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6482 else
6483 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6484
6485 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6486 bits (insn, 0, 3));
6487 return 0;
6488 }
6489
6490 static int
6491 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6492 struct regcache *regs,
6493 struct displaced_step_closure *dsc)
6494 {
6495 unsigned rn, rm, rd;
6496
6497 rd = bits (insn, 3, 6);
6498 rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6499 rm = 2;
6500
6501 if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6502 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6503
6504 if (debug_displaced)
6505 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6506 "ALU", (unsigned short) insn);
6507
6508 dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6509
6510 install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6511
6512 return 0;
6513 }
6514
6515 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6516
6517 static void
6518 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6519 struct regcache *regs,
6520 struct displaced_step_closure *dsc)
6521 {
6522 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6523 int i;
6524
6525 for (i = 0; i < 4; i++)
6526 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6527
6528 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6529 }
6530
6531 static void
6532 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6533 struct displaced_step_closure *dsc,
6534 unsigned int rd, unsigned int rn, unsigned int rm,
6535 unsigned rs)
6536 {
6537 int i;
6538 ULONGEST rd_val, rn_val, rm_val, rs_val;
6539
6540 /* Instruction is of form:
6541
6542 <op><cond> rd, [rn,] rm, <shift> rs
6543
6544 Rewrite as:
6545
6546 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6547 r0, r1, r2, r3 <- rd, rn, rm, rs
6548 Insn: <op><cond> r0, r1, r2, <shift> r3
6549 Cleanup: tmp5 <- r0
6550 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6551 rd <- tmp5
6552 */
6553
6554 for (i = 0; i < 4; i++)
6555 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6556
6557 rd_val = displaced_read_reg (regs, dsc, rd);
6558 rn_val = displaced_read_reg (regs, dsc, rn);
6559 rm_val = displaced_read_reg (regs, dsc, rm);
6560 rs_val = displaced_read_reg (regs, dsc, rs);
6561 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6562 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6563 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6564 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6565 dsc->rd = rd;
6566 dsc->cleanup = &cleanup_alu_shifted_reg;
6567 }
6568
6569 static int
6570 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6571 struct regcache *regs,
6572 struct displaced_step_closure *dsc)
6573 {
6574 unsigned int op = bits (insn, 21, 24);
6575 int is_mov = (op == 0xd);
6576 unsigned int rd, rn, rm, rs;
6577
6578 if (!insn_references_pc (insn, 0x000fff0ful))
6579 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6580
6581 if (debug_displaced)
6582 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6583 "%.8lx\n", is_mov ? "move" : "ALU",
6584 (unsigned long) insn);
6585
6586 rn = bits (insn, 16, 19);
6587 rm = bits (insn, 0, 3);
6588 rs = bits (insn, 8, 11);
6589 rd = bits (insn, 12, 15);
6590
6591 if (is_mov)
6592 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6593 else
6594 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6595
6596 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6597
6598 return 0;
6599 }
6600
/* Clean up load instructions.  */

static void
cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
	      struct displaced_step_closure *dsc)
{
  ULONGEST rt_val, rt_val2 = 0, rn_val;

  /* Pick up what the displaced copy left in the scratch registers: r0
     (and r1 for doubleword transfers) hold the loaded data, r2 holds
     the possibly-updated base register value.  */
  rt_val = displaced_read_reg (regs, dsc, 0);
  if (dsc->u.ldst.xfersize == 8)
    rt_val2 = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, 2);

  /* Restore the scratch registers saved by install_load_store /
     arm_copy_extra_ld_st.  r3 was only used for register offsets.  */
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);

  /* Handle register writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
  /* Put result in right place.  Doubleword loads also fill Rt+1.  */
  displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
  if (dsc->u.ldst.xfersize == 8)
    displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
}
6629
/* Clean up store instructions.  */

static void
cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
	       struct displaced_step_closure *dsc)
{
  /* r2 holds the possibly-updated base register value.  */
  ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);

  /* Restore the scratch registers saved by install_load_store /
     arm_copy_extra_ld_st.  */
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
  /* NOTE(review): r4 is restored when restore_r4 is CLEAR, which looks
     inverted relative to the flag's name and to the "make sure it's
     restored" comment in arm_copy_ldr_str_ldrb_strb — confirm the
     intended sense of this flag.  */
  if (!dsc->u.ldst.restore_r4)
    displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);

  /* Writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
}
6651
/* Copy "extra" load/store instructions.  These are halfword/doubleword
   transfers, which have a different encoding to byte/word transfers.  */

static int
arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
		      struct regcache *regs, struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 24);
  unsigned int op2 = bits (insn, 5, 6);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);
  /* Per-opcode tables, indexed by the OPCODE computed below: whether the
     transfer is a load, and how many bytes it moves (8 == doubleword,
     which uses Rt and Rt+1).  */
  char load[12]     = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
  char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
  int immed = (op1 & 0x4) != 0;
  int opcode;
  ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;

  /* If none of Rt, Rn or Rm is the PC, the insn runs unmodified.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
			"insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
			(unsigned long) insn);

  /* Fold op2 and the relevant op1 bits into an index for the tables
     above; indices below 4 do not correspond to valid transfers.  */
  opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;

  if (opcode < 0)
    internal_error (__FILE__, __LINE__,
		    _("copy_extra_ld_st: instruction decode error"));

  /* Save the scratch registers r0-r2 (and r3 for register offsets).  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  /* Move the operand values into the scratch registers: Rt -> r0 (and
     Rt+1 -> r1 for doublewords), Rn -> r2, Rm -> r3.  */
  rt_val = displaced_read_reg (regs, dsc, rt);
  if (bytesize[opcode] == 8)
    rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  if (bytesize[opcode] == 8)
    displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);

  dsc->rd = rt;
  dsc->u.ldst.xfersize = bytesize[opcode];
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  /* Writeback happens for post-indexed (P == 0) or W == 1 forms.  */
  dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
       ->
       {ldr,str}<width><cond> r0, [r1,] [r2, #imm].  */
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
  else
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
       ->
       {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3].  */
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;

  dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;

  return 0;
}
6726
/* Copy byte/half word/word loads and stores.  */

/* Prepare DSC for a single load or store: save scratch registers r0, r2
   (plus r3 for register offsets and r4 for stores), move the operand
   values into them, record the transfer parameters, and select the
   matching cleanup routine.  The caller supplies the rewritten
   instruction(s) in dsc->modinsn.  */

static void
install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
		    struct displaced_step_closure *dsc, int load,
		    int immed, int writeback, int size, int usermode,
		    int rt, int rm, int rn)
{
  ULONGEST rt_val, rn_val, rm_val = 0;

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  if (!load)
    dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);

  /* Rt -> r0, Rn -> r2, Rm -> r3 (register-offset forms only).  */
  rt_val = displaced_read_reg (regs, dsc, rt);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rt;
  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = writeback;

  /* To write PC we can do:

     Before this sequence of instructions:
     r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
     r2 is the Rn value got from displaced_read_reg.

     Insn1: push {pc}		 Write address of STR instruction + offset on stack
     Insn2: pop  {r4}		 Read it back from stack, r4 = addr(Insn1) + offset
     Insn3: sub r4, r4, pc	 r4 = addr(Insn1) + offset - pc
				    = addr(Insn1) + offset - addr(Insn3) - 8
				    = offset - 16
     Insn4: add r4, r4, #8	 r4 = offset - 8
     Insn5: add r0, r0, r4	 r0 = from + 8 + offset - 8
				    = from + offset
     Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])

     Otherwise we don't know what value to write for PC, since the offset is
     architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
     of this can be found in Section "Saving from r15" in
     http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;
}
6782
6783
/* Copy a 32-bit Thumb LDR (literal), i.e. a PC-relative load, of SIZE
   bytes into register Rt, emulating the PC-relative addressing with
   scratch registers r2 (PC value) and r3 (offset).  */

static int
thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  struct displaced_step_closure *dsc, int size)
{
  unsigned int u_bit = bit (insn1, 7);	/* Add (1) or subtract (0) IMM12.  */
  unsigned int rt = bits (insn2, 12, 15);
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
			imm12);

  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction LDR Rt imm12 into:

     Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12

     LDR R0, R2, R3,

     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3].  */


  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  /* Word-align the PC base, as LDR (literal) addressing requires.  */
  pc_val = pc_val & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);

  dsc->rd = rt;

  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  /* LDR R0, R2, R3 */
  dsc->modinsn[0] = 0xf852;
  dsc->modinsn[1] = 0x3;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_load;

  return 0;
}
6839
/* Copy a 32-bit Thumb LDR (immediate or register) that references the
   PC, remapping its operands onto r0/r2/r3 via install_load_store.  */

static int
thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  struct displaced_step_closure *dsc,
			  int writeback, int immed)
{
  unsigned int rt = bits (insn2, 12, 15);
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */
  /* In LDR (register), there is also a register Rm, which is not allowed to
     be PC, so we don't have to check it.  */

  if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
					dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
			rt, rn, insn1, insn2);

  install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
		      0, rt, rm, rn);

  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* ldr[b]<cond> rt, [rn, #imm], etc.
       ->
       ldr[b]<cond> r0, [r2, #imm].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = insn2 & 0x0fff;
    }
  else
    /* ldr[b]<cond> rt, [rn, rm], etc.
       ->
       ldr[b]<cond> r0, [r2, r3].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
    }

  dsc->numinsns = 2;

  return 0;
}
6887
6888
/* Copy an ARM single load or store (word or byte) that references the
   PC.  LOAD selects load vs store, SIZE is the transfer width in bytes,
   USERMODE marks the unprivileged ("t") forms.  Storing the PC needs a
   special multi-insn sequence to reproduce the architecture-dependent
   stored-PC offset (see install_load_store's comment).  */

static int
arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
			    struct regcache *regs,
			    struct displaced_step_closure *dsc,
			    int load, int size, int usermode)
{
  int immed = !bit (insn, 25);
  /* Writeback for post-indexed (P == 0) or W == 1 addressing.  */
  int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */

  /* If none of Rt, Rn or Rm is the PC, the insn runs unmodified.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying %s%s r%d [r%d] insn %.8lx\n",
			load ? (size == 1 ? "ldrb" : "ldr")
			     : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
			rt, rn,
			(unsigned long) insn);

  install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
		      usermode, rt, rm, rn);

  if (load || rt != ARM_PC_REGNUM)
    {
      dsc->u.ldst.restore_r4 = 0;

      if (immed)
	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
	dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
      else
	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, r3].  */
	dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
    }
  else
    {
      /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
      dsc->u.ldst.restore_r4 = 1;
      /* Compute the original PC store value in r0 (see the "To write PC"
	 comment in install_load_store for the arithmetic).  */
      dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
      dsc->modinsn[1] = 0xe8bd0010;  /* pop  {r4} */
      dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
      dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
      dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */

      /* As above.  */
      if (immed)
	dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
      else
	dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;

      dsc->numinsns = 6;
    }

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;

  return 0;
}
6953
/* Cleanup LDM instructions with fully-populated register list.  This is an
   unfortunate corner case: it's impossible to implement correctly by modifying
   the instruction.  The issue is as follows: we have an instruction,

   ldm rN, {r0-r15}

   which we must rewrite to avoid loading PC.  A possible solution would be to
   do the load in two halves, something like (with suitable cleanup
   afterwards):

   mov r8, rN
   ldm[id][ab] r8!, {r0-r7}
   str r7, <temp>
   ldm[id][ab] r8, {r7-r14}
   <bkpt>

   but at present there's no suitable place for <temp>, since the scratch space
   is overwritten before the cleanup routine is called.  For now, we simply
   emulate the instruction.  */

static void
cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  int inc = dsc->u.block.increment;
  /* Pre-increment/decrement adjusts the address before each transfer;
     post- adjusts it after.  */
  int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
  int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
  uint32_t regmask = dsc->u.block.regmask;
  /* Scan the register list upwards for increment, downwards for
     decrement, matching the architectural transfer order.  */
  int regno = inc ? 0 : 15;
  CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
  int exception_return = dsc->u.block.load && dsc->u.block.user
			 && (regmask & 0x8000) != 0;
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int do_transfer = condition_true (dsc->u.block.cond, status);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (!do_transfer)
    return;

  /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
     sensible we can do here.  Complain loudly.  */
  if (exception_return)
    error (_("Cannot single-step exception return"));

  /* We don't handle any stores here for now.  */
  gdb_assert (dsc->u.block.load != 0);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
			"%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
			dsc->u.block.increment ? "inc" : "dec",
			dsc->u.block.before ? "before" : "after");

  while (regmask)
    {
      uint32_t memword;

      /* Advance to the next register in the transfer list.  */
      if (inc)
	while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
	  regno++;
      else
	while (regno >= 0 && (regmask & (1 << regno)) == 0)
	  regno--;

      xfer_addr += bump_before;

      memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
      displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);

      xfer_addr += bump_after;

      regmask &= ~(1 << regno);
    }

  if (dsc->u.block.writeback)
    displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
			 CANNOT_WRITE_PC);
}
7032
/* Clean up an STM which included the PC in the register list.  The value
   actually stored for the PC is architecture-dependent (insn address plus
   8 or 12); measure the offset the hardware used against the scratch-area
   copy, then rewrite the stored word to the value the original
   (non-displaced) instruction would have stored.  */

static void
cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int store_executed = condition_true (dsc->u.block.cond, status);
  CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
  CORE_ADDR stm_insn_addr;
  uint32_t pc_val;
  long offset;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* If condition code fails, there's nothing else to do.  */
  if (!store_executed)
    return;

  /* Locate where the PC (the highest-numbered register, hence the last
     word of the block) was stored.  */
  if (dsc->u.block.increment)
    {
      pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;

      if (dsc->u.block.before)
	 pc_stored_at += 4;
    }
  else
    {
      pc_stored_at = dsc->u.block.xfer_addr;

      if (dsc->u.block.before)
	 pc_stored_at -= 4;
    }

  /* The stored value is relative to the scratch copy at scratch_base;
     the difference is the CPU's store-PC offset (8 or 12).  */
  pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
  stm_insn_addr = dsc->scratch_base;
  offset = pc_val - stm_insn_addr;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
			"STM instruction\n", offset);

  /* Rewrite the stored PC to the proper value for the non-displaced original
     instruction.  */
  write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
				 dsc->insn_addr + offset);
}
7079
/* Clean up an LDM which includes the PC in the register list.  We clumped all
   the registers in the transferred list into a contiguous range r0...rX (to
   avoid loading PC directly and losing control of the debugged program), so we
   must undo that here.  */

static void
cleanup_block_load_pc (struct gdbarch *gdbarch,
		       struct regcache *regs,
		       struct displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int load_executed = condition_true (dsc->u.block.cond, status);
  unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
  unsigned int regs_loaded = bitcount (mask);
  unsigned int num_to_shuffle = regs_loaded, clobbered;

  /* The method employed here will fail if the register list is fully populated
     (we need to avoid loading PC directly).  */
  gdb_assert (num_to_shuffle < 16);

  if (!load_executed)
    return;

  /* The displaced copy loaded into r0..r(N-1); those are the clobbered
     scratch registers until their values are moved into place.  */
  clobbered = (1 << num_to_shuffle) - 1;

  /* Walk the destination registers from r15 downwards, moving each loaded
     value from its temporary slot (r0 upwards) to its real target.  Going
     top-down guarantees a value is never overwritten before it is moved,
     because read_reg <= write_reg at every step.  */
  while (num_to_shuffle > 0)
    {
      if ((mask & (1 << write_reg)) != 0)
	{
	  unsigned int read_reg = num_to_shuffle - 1;

	  if (read_reg != write_reg)
	    {
	      ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
	      displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
	      if (debug_displaced)
		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
				    "loaded register r%d to r%d\n"), read_reg,
				    write_reg);
	    }
	  else if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
				"r%d already in the right place\n"),
				write_reg);

	  clobbered &= ~(1 << write_reg);

	  num_to_shuffle--;
	}

      write_reg--;
    }

  /* Restore any registers we scribbled over.  */
  for (write_reg = 0; clobbered != 0; write_reg++)
    {
      if ((clobbered & (1 << write_reg)) != 0)
	{
	  displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
			       CANNOT_WRITE_PC);
	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
				"clobbered register r%d\n"), write_reg);
	  clobbered &= ~(1 << write_reg);
	}
    }

  /* Perform register writeback manually.  */
  if (dsc->u.block.writeback)
    {
      ULONGEST new_rn_val = dsc->u.block.xfer_addr;

      if (dsc->u.block.increment)
	new_rn_val += regs_loaded * 4;
      else
	new_rn_val -= regs_loaded * 4;

      displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
			   CANNOT_WRITE_PC);
    }
}
7161
7162 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7163 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7164
7165 static int
7166 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7167 struct regcache *regs,
7168 struct displaced_step_closure *dsc)
7169 {
7170 int load = bit (insn, 20);
7171 int user = bit (insn, 22);
7172 int increment = bit (insn, 23);
7173 int before = bit (insn, 24);
7174 int writeback = bit (insn, 21);
7175 int rn = bits (insn, 16, 19);
7176
7177 /* Block transfers which don't mention PC can be run directly
7178 out-of-line. */
7179 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7180 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7181
7182 if (rn == ARM_PC_REGNUM)
7183 {
7184 warning (_("displaced: Unpredictable LDM or STM with "
7185 "base register r15"));
7186 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7187 }
7188
7189 if (debug_displaced)
7190 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7191 "%.8lx\n", (unsigned long) insn);
7192
7193 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7194 dsc->u.block.rn = rn;
7195
7196 dsc->u.block.load = load;
7197 dsc->u.block.user = user;
7198 dsc->u.block.increment = increment;
7199 dsc->u.block.before = before;
7200 dsc->u.block.writeback = writeback;
7201 dsc->u.block.cond = bits (insn, 28, 31);
7202
7203 dsc->u.block.regmask = insn & 0xffff;
7204
7205 if (load)
7206 {
7207 if ((insn & 0xffff) == 0xffff)
7208 {
7209 /* LDM with a fully-populated register list. This case is
7210 particularly tricky. Implement for now by fully emulating the
7211 instruction (which might not behave perfectly in all cases, but
7212 these instructions should be rare enough for that not to matter
7213 too much). */
7214 dsc->modinsn[0] = ARM_NOP;
7215
7216 dsc->cleanup = &cleanup_block_load_all;
7217 }
7218 else
7219 {
7220 /* LDM of a list of registers which includes PC. Implement by
7221 rewriting the list of registers to be transferred into a
7222 contiguous chunk r0...rX before doing the transfer, then shuffling
7223 registers into the correct places in the cleanup routine. */
7224 unsigned int regmask = insn & 0xffff;
7225 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7226 unsigned int to = 0, from = 0, i, new_rn;
7227
7228 for (i = 0; i < num_in_list; i++)
7229 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7230
7231 /* Writeback makes things complicated. We need to avoid clobbering
7232 the base register with one of the registers in our modified
7233 register list, but just using a different register can't work in
7234 all cases, e.g.:
7235
7236 ldm r14!, {r0-r13,pc}
7237
7238 which would need to be rewritten as:
7239
7240 ldm rN!, {r0-r14}
7241
7242 but that can't work, because there's no free register for N.
7243
7244 Solve this by turning off the writeback bit, and emulating
7245 writeback manually in the cleanup routine. */
7246
7247 if (writeback)
7248 insn &= ~(1 << 21);
7249
7250 new_regmask = (1 << num_in_list) - 1;
7251
7252 if (debug_displaced)
7253 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7254 "{..., pc}: original reg list %.4x, modified "
7255 "list %.4x\n"), rn, writeback ? "!" : "",
7256 (int) insn & 0xffff, new_regmask);
7257
7258 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7259
7260 dsc->cleanup = &cleanup_block_load_pc;
7261 }
7262 }
7263 else
7264 {
7265 /* STM of a list of registers which includes PC. Run the instruction
7266 as-is, but out of line: this will store the wrong value for the PC,
7267 so we must manually fix up the memory in the cleanup routine.
7268 Doing things this way has the advantage that we can auto-detect
7269 the offset of the PC write (which is architecture-dependent) in
7270 the cleanup routine. */
7271 dsc->modinsn[0] = insn;
7272
7273 dsc->cleanup = &cleanup_block_store_pc;
7274 }
7275
7276 return 0;
7277 }
7278
7279 static int
7280 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7281 struct regcache *regs,
7282 struct displaced_step_closure *dsc)
7283 {
7284 int rn = bits (insn1, 0, 3);
7285 int load = bit (insn1, 4);
7286 int writeback = bit (insn1, 5);
7287
7288 /* Block transfers which don't mention PC can be run directly
7289 out-of-line. */
7290 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7291 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7292
7293 if (rn == ARM_PC_REGNUM)
7294 {
7295 warning (_("displaced: Unpredictable LDM or STM with "
7296 "base register r15"));
7297 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7298 "unpredictable ldm/stm", dsc);
7299 }
7300
7301 if (debug_displaced)
7302 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7303 "%.4x%.4x\n", insn1, insn2);
7304
7305 /* Clear bit 13, since it should be always zero. */
7306 dsc->u.block.regmask = (insn2 & 0xdfff);
7307 dsc->u.block.rn = rn;
7308
7309 dsc->u.block.load = load;
7310 dsc->u.block.user = 0;
7311 dsc->u.block.increment = bit (insn1, 7);
7312 dsc->u.block.before = bit (insn1, 8);
7313 dsc->u.block.writeback = writeback;
7314 dsc->u.block.cond = INST_AL;
7315 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7316
7317 if (load)
7318 {
7319 if (dsc->u.block.regmask == 0xffff)
7320 {
7321 /* This branch is impossible to happen. */
7322 gdb_assert (0);
7323 }
7324 else
7325 {
7326 unsigned int regmask = dsc->u.block.regmask;
7327 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7328 unsigned int to = 0, from = 0, i, new_rn;
7329
7330 for (i = 0; i < num_in_list; i++)
7331 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7332
7333 if (writeback)
7334 insn1 &= ~(1 << 5);
7335
7336 new_regmask = (1 << num_in_list) - 1;
7337
7338 if (debug_displaced)
7339 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7340 "{..., pc}: original reg list %.4x, modified "
7341 "list %.4x\n"), rn, writeback ? "!" : "",
7342 (int) dsc->u.block.regmask, new_regmask);
7343
7344 dsc->modinsn[0] = insn1;
7345 dsc->modinsn[1] = (new_regmask & 0xffff);
7346 dsc->numinsns = 2;
7347
7348 dsc->cleanup = &cleanup_block_load_pc;
7349 }
7350 }
7351 else
7352 {
7353 dsc->modinsn[0] = insn1;
7354 dsc->modinsn[1] = insn2;
7355 dsc->numinsns = 2;
7356 dsc->cleanup = &cleanup_block_store_pc;
7357 }
7358 return 0;
7359 }
7360
7361 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7362 for Linux, where some SVC instructions must be treated specially. */
7363
7364 static void
7365 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7366 struct displaced_step_closure *dsc)
7367 {
7368 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7369
7370 if (debug_displaced)
7371 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7372 "%.8lx\n", (unsigned long) resume_addr);
7373
7374 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7375 }
7376
7377
/* Common copy routine for SVC instruction.  */
7379
7380 static int
7381 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7382 struct displaced_step_closure *dsc)
7383 {
7384 /* Preparation: none.
7385 Insn: unmodified svc.
7386 Cleanup: pc <- insn_addr + insn_size. */
7387
7388 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7389 instruction. */
7390 dsc->wrote_to_pc = 1;
7391
7392 /* Allow OS-specific code to override SVC handling. */
7393 if (dsc->u.svc.copy_svc_os)
7394 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7395 else
7396 {
7397 dsc->cleanup = &cleanup_svc;
7398 return 0;
7399 }
7400 }
7401
7402 static int
7403 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7404 struct regcache *regs, struct displaced_step_closure *dsc)
7405 {
7406
7407 if (debug_displaced)
7408 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7409 (unsigned long) insn);
7410
7411 dsc->modinsn[0] = insn;
7412
7413 return install_svc (gdbarch, regs, dsc);
7414 }
7415
7416 static int
7417 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7418 struct regcache *regs, struct displaced_step_closure *dsc)
7419 {
7420
7421 if (debug_displaced)
7422 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7423 insn);
7424
7425 dsc->modinsn[0] = insn;
7426
7427 return install_svc (gdbarch, regs, dsc);
7428 }
7429
7430 /* Copy undefined instructions. */
7431
7432 static int
7433 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7434 struct displaced_step_closure *dsc)
7435 {
7436 if (debug_displaced)
7437 fprintf_unfiltered (gdb_stdlog,
7438 "displaced: copying undefined insn %.8lx\n",
7439 (unsigned long) insn);
7440
7441 dsc->modinsn[0] = insn;
7442
7443 return 0;
7444 }
7445
7446 static int
7447 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7448 struct displaced_step_closure *dsc)
7449 {
7450
7451 if (debug_displaced)
7452 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7453 "%.4x %.4x\n", (unsigned short) insn1,
7454 (unsigned short) insn2);
7455
7456 dsc->modinsn[0] = insn1;
7457 dsc->modinsn[1] = insn2;
7458 dsc->numinsns = 2;
7459
7460 return 0;
7461 }
7462
7463 /* Copy unpredictable instructions. */
7464
7465 static int
7466 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7467 struct displaced_step_closure *dsc)
7468 {
7469 if (debug_displaced)
7470 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7471 "%.8lx\n", (unsigned long) insn);
7472
7473 dsc->modinsn[0] = insn;
7474
7475 return 0;
7476 }
7477
7478 /* The decode_* functions are instruction decoding helpers. They mostly follow
7479 the presentation in the ARM ARM. */
7480
/* Decode the misc/memory-hint/Advanced-SIMD space (unconditional insns with
   bit 27 clear).  Dispatch each encoding to the appropriate copy routine;
   most can run unmodified out of line.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  /* op1 = bits 20-26, op2 = bits 4-7, rn = bits 16-19, mirroring the
     field names used by the decode tables in the ARM ARM.  */
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      /* rn == 0xf (PC) is unpredictable for pld/pldw immediate here.  */
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    /* Memory barriers and clrex: no PC involvement, run unmodified.  */
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    /* Register-offset preload/hint forms; bit 7 of op1 is ignored.  */
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
        /* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
7540
/* Decode the unconditional (cond == 0xf) ARM instruction space and dispatch
   to the appropriate copy routine.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      /* BLX (immediate) lives in the unconditional space.  */
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	/* rn_f: base register is the PC (literal forms).  */
	int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
7623
7624 /* Decode miscellaneous instructions in dp/misc encoding space. */
7625
static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  /* op2 = bits 4-6, op = bits 21-22, following the ARM ARM decode table
     field names.  (The former "op1" local was never consulted.)  */
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
        /* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
        /* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      else
	/* Previously this fell through to the default label; make the
	   undefined-instruction result explicit.  */
	return arm_copy_undef (gdbarch, insn, dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
7676
/* Decode the data-processing/miscellaneous instruction space and dispatch
   to the appropriate copy routine.  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    struct displaced_step_closure *dsc)
{
  /* Bit 25 set: immediate forms.  */
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      /* Register forms: op1 = bits 20-24, op2 = bits 4-7.  */
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unpriveleged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7722
/* Decode load/store word and unsigned byte instructions.  The trailing
   arguments to arm_copy_ldr_str_ldrb_strb appear to select load-vs-store,
   transfer size and user-mode variants respectively -- see that routine.  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  /* a: bit 25 (register vs immediate offset), b: bit 4, op1: bits 20-24.
     (The former "rn_f" local was computed but never used.)  */
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
7760
/* Decode the media instruction space (parallel add/sub, pack/unpack,
   saturate, bit-field ops).  None of these can write the PC, so all are
   either copied unmodified or treated as undefined.  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  struct displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  /* Rd == 0xf distinguishes usad8 from usada8.  */
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  /* Rn == 0xf distinguishes bfc from bfi.  */
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7816
static int
arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
			struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  /* Bit 25 distinguishes branch (B/BL/BLX) from block transfer
     (LDM/STM) in this encoding space.  */
  return (bit (insn, 25)
	  ? arm_copy_b_bl_blx (gdbarch, insn, regs, dsc)
	  : arm_copy_block_xfer (gdbarch, insn, regs, dsc));
}
7827
static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  /* Dispatch on the opcode field (bits 20-24) of the VFP/Neon extension
     register load/store space.  */
  unsigned int op = bits (insn, 20, 24);

  switch (op)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      /* Multiple-register stores (vstm/vpush).  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      /* Multiple-register loads (vldm/vpop).  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7858
7859 /* Decode shifted register instructions. */
7860
static int
thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
			    uint16_t insn2, struct regcache *regs,
			    struct displaced_step_closure *dsc)
{
  /* In this encoding space only MOV (op == 0x2 with Rn == 0xf) can
     involve the PC; everything else runs unmodified out of line.  */
  unsigned int opcode = bits (insn1, 5, 8);

  if (opcode != 0x2 || bits (insn1, 0, 3) != 0xf)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					"dp (shift reg)", dsc);

  /* MOV: handle via the ALU-immediate copier.  */
  return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
}
7877
7878
7879 /* Decode extension register load/store. Exactly the same as
7880 arm_decode_ext_reg_ld_st. */
7881
static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  /* Dispatch on the opcode field (bits 4-8 of the first halfword); the
     layout mirrors the ARM encoding handled by arm_decode_ext_reg_ld_st.  */
  unsigned int op = bits (insn1, 4, 8);

  switch (op)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c:  /* 01x00 */
    case 0x0a: case 0x0e:  /* 01x10 */
    case 0x12: case 0x16:  /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d:  /* 01x01 */
    case 0x0b: case 0x0f:  /* 01x11 */
    case 0x13: case 0x17:  /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);

    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* vldr may reference the PC as base (literal loads).  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7917
7918 static int
7919 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7920 struct regcache *regs, struct displaced_step_closure *dsc)
7921 {
7922 unsigned int op1 = bits (insn, 20, 25);
7923 int op = bit (insn, 4);
7924 unsigned int coproc = bits (insn, 8, 11);
7925 unsigned int rn = bits (insn, 16, 19);
7926
7927 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7928 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7929 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7930 && (coproc & 0xe) != 0xa)
7931 /* stc/stc2. */
7932 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7933 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7934 && (coproc & 0xe) != 0xa)
7935 /* ldc/ldc2 imm/lit. */
7936 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7937 else if ((op1 & 0x3e) == 0x00)
7938 return arm_copy_undef (gdbarch, insn, dsc);
7939 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7940 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7941 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7942 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7943 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7944 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7945 else if ((op1 & 0x30) == 0x20 && !op)
7946 {
7947 if ((coproc & 0xe) == 0xa)
7948 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7949 else
7950 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7951 }
7952 else if ((op1 & 0x30) == 0x20 && op)
7953 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7954 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7955 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7956 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7957 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7958 else if ((op1 & 0x30) == 0x30)
7959 return arm_copy_svc (gdbarch, insn, regs, dsc);
7960 else
7961 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7962 }
7963
/* Decode the Thumb-2 coprocessor/SIMD/VFP instruction space.  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 struct displaced_step_closure *dsc)
{
  /* Decode fields actually consulted below.  (The former "op1" and "rn"
     locals were computed but never used.)  */
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0)  /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	   /*coproc is 101x.  SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0) /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else /* LDC/LDC2 {literal, immediate}.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);

  return 0;
}
8006
8007 static void
8008 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
8009 struct displaced_step_closure *dsc, int rd)
8010 {
8011 /* ADR Rd, #imm
8012
8013 Rewrite as:
8014
8015 Preparation: Rd <- PC
8016 Insn: ADD Rd, #imm
8017 Cleanup: Null.
8018 */
8019
8020 /* Rd <- PC */
8021 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8022 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
8023 }
8024
8025 static int
8026 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
8027 struct displaced_step_closure *dsc,
8028 int rd, unsigned int imm)
8029 {
8030
8031 /* Encoding T2: ADDS Rd, #imm */
8032 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
8033
8034 install_pc_relative (gdbarch, regs, dsc, rd);
8035
8036 return 0;
8037 }
8038
8039 static int
8040 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
8041 struct regcache *regs,
8042 struct displaced_step_closure *dsc)
8043 {
8044 unsigned int rd = bits (insn, 8, 10);
8045 unsigned int imm8 = bits (insn, 0, 7);
8046
8047 if (debug_displaced)
8048 fprintf_unfiltered (gdb_stdlog,
8049 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8050 rd, imm8, insn);
8051
8052 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8053 }
8054
/* Copy 32-bit Thumb ADR (ADR.W): rewrite as ADD/SUB Rd, Rd, #imm with Rd
   preloaded with the PC value.  */

static int
thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int rd = bits (insn2, 8, 11);
  /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
     extract raw immediate encoding rather than computing immediate.  When
     generating ADD or SUB instruction, we can simply perform OR operation to
     set immediate into ADD.  */
  unsigned int imm_3_8 = insn2 & 0x70ff;
  unsigned int imm_i = insn1 & 0x0400;  /* Clear all bits except bit 10.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
			rd, imm_i, imm_3_8, insn1, insn2);

  if (bit (insn1, 7))  /* Encoding T2 (the subtracting form of ADR).  */
    {
      /* Encoding T2: SUB Rd, Rd, #imm.  */
      dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  else  /* Encoding T3 (the adding form of ADR).  */
    {
      /* Encoding T3: ADD Rd, Rd, #imm.  */
      dsc->modinsn[0] = (0xf100 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  dsc->numinsns = 2;

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
8091
8092 static int
8093 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8094 struct regcache *regs,
8095 struct displaced_step_closure *dsc)
8096 {
8097 unsigned int rt = bits (insn1, 8, 10);
8098 unsigned int pc;
8099 int imm8 = (bits (insn1, 0, 7) << 2);
8100 CORE_ADDR from = dsc->insn_addr;
8101
8102 /* LDR Rd, #imm8
8103
8104 Rwrite as:
8105
8106 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8107
8108 Insn: LDR R0, [R2, R3];
8109 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8110
8111 if (debug_displaced)
8112 fprintf_unfiltered (gdb_stdlog,
8113 "displaced: copying thumb ldr r%d [pc #%d]\n"
8114 , rt, imm8);
8115
8116 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8117 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8118 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8119 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8120 /* The assembler calculates the required value of the offset from the
8121 Align(PC,4) value of this instruction to the label. */
8122 pc = pc & 0xfffffffc;
8123
8124 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8125 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8126
8127 dsc->rd = rt;
8128 dsc->u.ldst.xfersize = 4;
8129 dsc->u.ldst.rn = 0;
8130 dsc->u.ldst.immed = 0;
8131 dsc->u.ldst.writeback = 0;
8132 dsc->u.ldst.restore_r4 = 0;
8133
8134 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8135
8136 dsc->cleanup = &cleanup_load;
8137
8138 return 0;
8139 }
8140
/* Copy Thumb cbnz/cbz instruction.  */
8142
static int
thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
		     struct regcache *regs,
		     struct displaced_step_closure *dsc)
{
  /* non_zero: set for CBNZ, clear for CBZ.  */
  int non_zero = bit (insn1, 11);
  /* Branch offset: i:imm5:'0' reassembled from bits 9 and 3-7.  */
  unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
  CORE_ADDR from = dsc->insn_addr;
  int rn = bits (insn1, 0, 2);
  int rn_val = displaced_read_reg (regs, dsc, rn);

  /* Evaluate the branch condition now, against the current register
     value, since CBNZ/CBZ cannot be executed out of line meaningfully.  */
  dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
  /* CBNZ and CBZ do not affect the condition flags.  If condition is true,
     set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
     condition is false, let it be, cleanup_branch will do nothing.  */
  if (dsc->u.branch.cond)
    {
      dsc->u.branch.cond = INST_AL;
      /* Taken: target is PC (insn address + 4) plus the decoded offset.  */
      dsc->u.branch.dest = from + 4 + imm5;
    }
  else
    /* Not taken: fall through to the next (2-byte) instruction.  */
      dsc->u.branch.dest = from + 2;

  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
			" insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
			rn, rn_val, insn1, dsc->u.branch.dest);

  /* Execute a NOP out of line; cleanup_branch performs the branch.  */
  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;
  return 0;
}
8179
8180 /* Copy Table Branch Byte/Halfword */
8181 static int
8182 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8183 uint16_t insn2, struct regcache *regs,
8184 struct displaced_step_closure *dsc)
8185 {
8186 ULONGEST rn_val, rm_val;
8187 int is_tbh = bit (insn2, 4);
8188 CORE_ADDR halfwords = 0;
8189 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8190
8191 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8192 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8193
8194 if (is_tbh)
8195 {
8196 gdb_byte buf[2];
8197
8198 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8199 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8200 }
8201 else
8202 {
8203 gdb_byte buf[1];
8204
8205 target_read_memory (rn_val + rm_val, buf, 1);
8206 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8207 }
8208
8209 if (debug_displaced)
8210 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
8211 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
8212 (unsigned int) rn_val, (unsigned int) rm_val,
8213 (unsigned int) halfwords);
8214
8215 dsc->u.branch.cond = INST_AL;
8216 dsc->u.branch.link = 0;
8217 dsc->u.branch.exchange = 0;
8218 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8219
8220 dsc->cleanup = &cleanup_branch;
8221
8222 return 0;
8223 }
8224
8225 static void
8226 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8227 struct displaced_step_closure *dsc)
8228 {
8229 /* PC <- r7 */
8230 int val = displaced_read_reg (regs, dsc, 7);
8231 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8232
8233 /* r7 <- r8 */
8234 val = displaced_read_reg (regs, dsc, 8);
8235 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8236
8237 /* r8 <- tmp[0] */
8238 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8239
8240 }
8241
8242 static int
8243 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8244 struct regcache *regs,
8245 struct displaced_step_closure *dsc)
8246 {
8247 dsc->u.block.regmask = insn1 & 0x00ff;
8248
8249 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8250 to :
8251
8252 (1) register list is full, that is, r0-r7 are used.
8253 Prepare: tmp[0] <- r8
8254
8255 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8256 MOV r8, r7; Move value of r7 to r8;
8257 POP {r7}; Store PC value into r7.
8258
8259 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8260
8261 (2) register list is not full, supposing there are N registers in
8262 register list (except PC, 0 <= N <= 7).
8263 Prepare: for each i, 0 - N, tmp[i] <- ri.
8264
8265 POP {r0, r1, ...., rN};
8266
8267 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8268 from tmp[] properly.
8269 */
8270 if (debug_displaced)
8271 fprintf_unfiltered (gdb_stdlog,
8272 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8273 dsc->u.block.regmask, insn1);
8274
8275 if (dsc->u.block.regmask == 0xff)
8276 {
8277 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8278
8279 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8280 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8281 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8282
8283 dsc->numinsns = 3;
8284 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8285 }
8286 else
8287 {
8288 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8289 unsigned int new_regmask, bit = 1;
8290 unsigned int to = 0, from = 0, i, new_rn;
8291
8292 for (i = 0; i < num_in_list + 1; i++)
8293 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8294
8295 new_regmask = (1 << (num_in_list + 1)) - 1;
8296
8297 if (debug_displaced)
8298 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8299 "{..., pc}: original reg list %.4x,"
8300 " modified list %.4x\n"),
8301 (int) dsc->u.block.regmask, new_regmask);
8302
8303 dsc->u.block.regmask |= 0x8000;
8304 dsc->u.block.writeback = 0;
8305 dsc->u.block.cond = INST_AL;
8306
8307 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8308
8309 dsc->cleanup = &cleanup_block_load_pc;
8310 }
8311
8312 return 0;
8313 }
8314
8315 static void
8316 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8317 struct regcache *regs,
8318 struct displaced_step_closure *dsc)
8319 {
8320 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8321 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8322 int err = 0;
8323
8324 /* 16-bit thumb instructions. */
8325 switch (op_bit_12_15)
8326 {
8327 /* Shift (imme), add, subtract, move and compare. */
8328 case 0: case 1: case 2: case 3:
8329 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8330 "shift/add/sub/mov/cmp",
8331 dsc);
8332 break;
8333 case 4:
8334 switch (op_bit_10_11)
8335 {
8336 case 0: /* Data-processing */
8337 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8338 "data-processing",
8339 dsc);
8340 break;
8341 case 1: /* Special data instructions and branch and exchange. */
8342 {
8343 unsigned short op = bits (insn1, 7, 9);
8344 if (op == 6 || op == 7) /* BX or BLX */
8345 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8346 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8347 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8348 else
8349 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8350 dsc);
8351 }
8352 break;
8353 default: /* LDR (literal) */
8354 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8355 }
8356 break;
8357 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8358 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8359 break;
8360 case 10:
8361 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8362 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8363 else /* Generate SP-relative address */
8364 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8365 break;
8366 case 11: /* Misc 16-bit instructions */
8367 {
8368 switch (bits (insn1, 8, 11))
8369 {
8370 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8371 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8372 break;
8373 case 12: case 13: /* POP */
8374 if (bit (insn1, 8)) /* PC is in register list. */
8375 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8376 else
8377 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8378 break;
8379 case 15: /* If-Then, and hints */
8380 if (bits (insn1, 0, 3))
8381 /* If-Then makes up to four following instructions conditional.
8382 IT instruction itself is not conditional, so handle it as a
8383 common unmodified instruction. */
8384 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8385 dsc);
8386 else
8387 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8388 break;
8389 default:
8390 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8391 }
8392 }
8393 break;
8394 case 12:
8395 if (op_bit_10_11 < 2) /* Store multiple registers */
8396 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8397 else /* Load multiple registers */
8398 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8399 break;
8400 case 13: /* Conditional branch and supervisor call */
8401 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8402 err = thumb_copy_b (gdbarch, insn1, dsc);
8403 else
8404 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8405 break;
8406 case 14: /* Unconditional branch */
8407 err = thumb_copy_b (gdbarch, insn1, dsc);
8408 break;
8409 default:
8410 err = 1;
8411 }
8412
8413 if (err)
8414 internal_error (__FILE__, __LINE__,
8415 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8416 }
8417
8418 static int
8419 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8420 uint16_t insn1, uint16_t insn2,
8421 struct regcache *regs,
8422 struct displaced_step_closure *dsc)
8423 {
8424 int rt = bits (insn2, 12, 15);
8425 int rn = bits (insn1, 0, 3);
8426 int op1 = bits (insn1, 7, 8);
8427 int err = 0;
8428
8429 switch (bits (insn1, 5, 6))
8430 {
8431 case 0: /* Load byte and memory hints */
8432 if (rt == 0xf) /* PLD/PLI */
8433 {
8434 if (rn == 0xf)
8435 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8436 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8437 else
8438 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8439 "pli/pld", dsc);
8440 }
8441 else
8442 {
8443 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8444 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8445 1);
8446 else
8447 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8448 "ldrb{reg, immediate}/ldrbt",
8449 dsc);
8450 }
8451
8452 break;
8453 case 1: /* Load halfword and memory hints. */
8454 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8455 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8456 "pld/unalloc memhint", dsc);
8457 else
8458 {
8459 if (rn == 0xf)
8460 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8461 2);
8462 else
8463 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8464 "ldrh/ldrht", dsc);
8465 }
8466 break;
8467 case 2: /* Load word */
8468 {
8469 int insn2_bit_8_11 = bits (insn2, 8, 11);
8470
8471 if (rn == 0xf)
8472 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8473 else if (op1 == 0x1) /* Encoding T3 */
8474 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8475 0, 1);
8476 else /* op1 == 0x0 */
8477 {
8478 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8479 /* LDR (immediate) */
8480 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8481 dsc, bit (insn2, 8), 1);
8482 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8483 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8484 "ldrt", dsc);
8485 else
8486 /* LDR (register) */
8487 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8488 dsc, 0, 0);
8489 }
8490 break;
8491 }
8492 default:
8493 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8494 break;
8495 }
8496 return 0;
8497 }
8498
8499 static void
8500 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8501 uint16_t insn2, struct regcache *regs,
8502 struct displaced_step_closure *dsc)
8503 {
8504 int err = 0;
8505 unsigned short op = bit (insn2, 15);
8506 unsigned int op1 = bits (insn1, 11, 12);
8507
8508 switch (op1)
8509 {
8510 case 1:
8511 {
8512 switch (bits (insn1, 9, 10))
8513 {
8514 case 0:
8515 if (bit (insn1, 6))
8516 {
8517 /* Load/store {dual, execlusive}, table branch. */
8518 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8519 && bits (insn2, 5, 7) == 0)
8520 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8521 dsc);
8522 else
8523 /* PC is not allowed to use in load/store {dual, exclusive}
8524 instructions. */
8525 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8526 "load/store dual/ex", dsc);
8527 }
8528 else /* load/store multiple */
8529 {
8530 switch (bits (insn1, 7, 8))
8531 {
8532 case 0: case 3: /* SRS, RFE */
8533 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8534 "srs/rfe", dsc);
8535 break;
8536 case 1: case 2: /* LDM/STM/PUSH/POP */
8537 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8538 break;
8539 }
8540 }
8541 break;
8542
8543 case 1:
8544 /* Data-processing (shift register). */
8545 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8546 dsc);
8547 break;
8548 default: /* Coprocessor instructions. */
8549 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8550 break;
8551 }
8552 break;
8553 }
8554 case 2: /* op1 = 2 */
8555 if (op) /* Branch and misc control. */
8556 {
8557 if (bit (insn2, 14) /* BLX/BL */
8558 || bit (insn2, 12) /* Unconditional branch */
8559 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8560 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8561 else
8562 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8563 "misc ctrl", dsc);
8564 }
8565 else
8566 {
8567 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8568 {
8569 int op = bits (insn1, 4, 8);
8570 int rn = bits (insn1, 0, 3);
8571 if ((op == 0 || op == 0xa) && rn == 0xf)
8572 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8573 regs, dsc);
8574 else
8575 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8576 "dp/pb", dsc);
8577 }
8578 else /* Data processing (modified immeidate) */
8579 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8580 "dp/mi", dsc);
8581 }
8582 break;
8583 case 3: /* op1 = 3 */
8584 switch (bits (insn1, 9, 10))
8585 {
8586 case 0:
8587 if (bit (insn1, 4))
8588 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8589 regs, dsc);
8590 else /* NEON Load/Store and Store single data item */
8591 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8592 "neon elt/struct load/store",
8593 dsc);
8594 break;
8595 case 1: /* op1 = 3, bits (9, 10) == 1 */
8596 switch (bits (insn1, 7, 8))
8597 {
8598 case 0: case 1: /* Data processing (register) */
8599 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8600 "dp(reg)", dsc);
8601 break;
8602 case 2: /* Multiply and absolute difference */
8603 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8604 "mul/mua/diff", dsc);
8605 break;
8606 case 3: /* Long multiply and divide */
8607 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8608 "lmul/lmua", dsc);
8609 break;
8610 }
8611 break;
8612 default: /* Coprocessor instructions */
8613 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8614 break;
8615 }
8616 break;
8617 default:
8618 err = 1;
8619 }
8620
8621 if (err)
8622 internal_error (__FILE__, __LINE__,
8623 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8624
8625 }
8626
8627 static void
8628 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8629 CORE_ADDR to, struct regcache *regs,
8630 struct displaced_step_closure *dsc)
8631 {
8632 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8633 uint16_t insn1
8634 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8635
8636 if (debug_displaced)
8637 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8638 "at %.8lx\n", insn1, (unsigned long) from);
8639
8640 dsc->is_thumb = 1;
8641 dsc->insn_size = thumb_insn_size (insn1);
8642 if (thumb_insn_size (insn1) == 4)
8643 {
8644 uint16_t insn2
8645 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8646 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8647 }
8648 else
8649 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8650 }
8651
8652 void
8653 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8654 CORE_ADDR to, struct regcache *regs,
8655 struct displaced_step_closure *dsc)
8656 {
8657 int err = 0;
8658 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8659 uint32_t insn;
8660
8661 /* Most displaced instructions use a 1-instruction scratch space, so set this
8662 here and override below if/when necessary. */
8663 dsc->numinsns = 1;
8664 dsc->insn_addr = from;
8665 dsc->scratch_base = to;
8666 dsc->cleanup = NULL;
8667 dsc->wrote_to_pc = 0;
8668
8669 if (!displaced_in_arm_mode (regs))
8670 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8671
8672 dsc->is_thumb = 0;
8673 dsc->insn_size = 4;
8674 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8675 if (debug_displaced)
8676 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8677 "at %.8lx\n", (unsigned long) insn,
8678 (unsigned long) from);
8679
8680 if ((insn & 0xf0000000) == 0xf0000000)
8681 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8682 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8683 {
8684 case 0x0: case 0x1: case 0x2: case 0x3:
8685 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8686 break;
8687
8688 case 0x4: case 0x5: case 0x6:
8689 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8690 break;
8691
8692 case 0x7:
8693 err = arm_decode_media (gdbarch, insn, dsc);
8694 break;
8695
8696 case 0x8: case 0x9: case 0xa: case 0xb:
8697 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8698 break;
8699
8700 case 0xc: case 0xd: case 0xe: case 0xf:
8701 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8702 break;
8703 }
8704
8705 if (err)
8706 internal_error (__FILE__, __LINE__,
8707 _("arm_process_displaced_insn: Instruction decode error"));
8708 }
8709
8710 /* Actually set up the scratch space for a displaced instruction. */
8711
8712 void
8713 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8714 CORE_ADDR to, struct displaced_step_closure *dsc)
8715 {
8716 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8717 unsigned int i, len, offset;
8718 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8719 int size = dsc->is_thumb? 2 : 4;
8720 const gdb_byte *bkp_insn;
8721
8722 offset = 0;
8723 /* Poke modified instruction(s). */
8724 for (i = 0; i < dsc->numinsns; i++)
8725 {
8726 if (debug_displaced)
8727 {
8728 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8729 if (size == 4)
8730 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8731 dsc->modinsn[i]);
8732 else if (size == 2)
8733 fprintf_unfiltered (gdb_stdlog, "%.4x",
8734 (unsigned short)dsc->modinsn[i]);
8735
8736 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8737 (unsigned long) to + offset);
8738
8739 }
8740 write_memory_unsigned_integer (to + offset, size,
8741 byte_order_for_code,
8742 dsc->modinsn[i]);
8743 offset += size;
8744 }
8745
8746 /* Choose the correct breakpoint instruction. */
8747 if (dsc->is_thumb)
8748 {
8749 bkp_insn = tdep->thumb_breakpoint;
8750 len = tdep->thumb_breakpoint_size;
8751 }
8752 else
8753 {
8754 bkp_insn = tdep->arm_breakpoint;
8755 len = tdep->arm_breakpoint_size;
8756 }
8757
8758 /* Put breakpoint afterwards. */
8759 write_memory (to + offset, bkp_insn, len);
8760
8761 if (debug_displaced)
8762 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8763 paddress (gdbarch, from), paddress (gdbarch, to));
8764 }
8765
8766 /* Entry point for copying an instruction into scratch space for displaced
8767 stepping. */
8768
8769 struct displaced_step_closure *
8770 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8771 CORE_ADDR from, CORE_ADDR to,
8772 struct regcache *regs)
8773 {
8774 struct displaced_step_closure *dsc
8775 = xmalloc (sizeof (struct displaced_step_closure));
8776 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8777 arm_displaced_init_closure (gdbarch, from, to, dsc);
8778
8779 return dsc;
8780 }
8781
8782 /* Entry point for cleaning things up after a displaced instruction has been
8783 single-stepped. */
8784
8785 void
8786 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8787 struct displaced_step_closure *dsc,
8788 CORE_ADDR from, CORE_ADDR to,
8789 struct regcache *regs)
8790 {
8791 if (dsc->cleanup)
8792 dsc->cleanup (gdbarch, regs, dsc);
8793
8794 if (!dsc->wrote_to_pc)
8795 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8796 dsc->insn_addr + dsc->insn_size);
8797
8798 }
8799
8800 #include "bfd-in2.h"
8801 #include "libcoff.h"
8802
8803 static int
8804 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8805 {
8806 struct gdbarch *gdbarch = info->application_data;
8807
8808 if (arm_pc_is_thumb (gdbarch, memaddr))
8809 {
8810 static asymbol *asym;
8811 static combined_entry_type ce;
8812 static struct coff_symbol_struct csym;
8813 static struct bfd fake_bfd;
8814 static bfd_target fake_target;
8815
8816 if (csym.native == NULL)
8817 {
8818 /* Create a fake symbol vector containing a Thumb symbol.
8819 This is solely so that the code in print_insn_little_arm()
8820 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8821 the presence of a Thumb symbol and switch to decoding
8822 Thumb instructions. */
8823
8824 fake_target.flavour = bfd_target_coff_flavour;
8825 fake_bfd.xvec = &fake_target;
8826 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8827 csym.native = &ce;
8828 csym.symbol.the_bfd = &fake_bfd;
8829 csym.symbol.name = "fake";
8830 asym = (asymbol *) & csym;
8831 }
8832
8833 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8834 info->symbols = &asym;
8835 }
8836 else
8837 info->symbols = NULL;
8838
8839 if (info->endian == BFD_ENDIAN_BIG)
8840 return print_insn_big_arm (memaddr, info);
8841 else
8842 return print_insn_little_arm (memaddr, info);
8843 }
8844
8845 /* The following define instruction sequences that will cause ARM
8846 cpu's to take an undefined instruction trap. These are used to
8847 signal a breakpoint to GDB.
8848
8849 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8850 modes. A different instruction is required for each mode. The ARM
8851 cpu's can also be big or little endian. Thus four different
8852 instructions are needed to support all cases.
8853
8854 Note: ARMv4 defines several new instructions that will take the
8855 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8856 not in fact add the new instructions. The new undefined
8857 instructions in ARMv4 are all instructions that had no defined
8858 behaviour in earlier chips. There is no guarantee that they will
8859 raise an exception, but may be treated as NOP's. In practice, it
   may only be safe to rely on instructions matching:
8861
8862 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8863 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8864 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8865
   Even this may only be true if the condition predicate is true.  The
8867 following use a condition predicate of ALWAYS so it is always TRUE.
8868
8869 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8870 and NetBSD all use a software interrupt rather than an undefined
   instruction to force a trap.  This can be handled by the
8872 abi-specific code during establishment of the gdbarch vector. */
8873
8874 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8875 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8876 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8877 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8878
8879 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8880 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8881 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8882 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8883
8884 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8885 the program counter value to determine whether a 16-bit or 32-bit
8886 breakpoint should be used. It returns a pointer to a string of
8887 bytes that encode a breakpoint instruction, stores the length of
8888 the string to *lenptr, and adjusts the program counter (if
8889 necessary) to point to the actual memory location where the
8890 breakpoint should be inserted. */
8891
8892 static const unsigned char *
8893 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8894 {
8895 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8896 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8897
8898 if (arm_pc_is_thumb (gdbarch, *pcptr))
8899 {
8900 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8901
8902 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8903 check whether we are replacing a 32-bit instruction. */
8904 if (tdep->thumb2_breakpoint != NULL)
8905 {
8906 gdb_byte buf[2];
8907 if (target_read_memory (*pcptr, buf, 2) == 0)
8908 {
8909 unsigned short inst1;
8910 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8911 if (thumb_insn_size (inst1) == 4)
8912 {
8913 *lenptr = tdep->thumb2_breakpoint_size;
8914 return tdep->thumb2_breakpoint;
8915 }
8916 }
8917 }
8918
8919 *lenptr = tdep->thumb_breakpoint_size;
8920 return tdep->thumb_breakpoint;
8921 }
8922 else
8923 {
8924 *lenptr = tdep->arm_breakpoint_size;
8925 return tdep->arm_breakpoint;
8926 }
8927 }
8928
8929 static void
8930 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8931 int *kindptr)
8932 {
8933 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8934
8935 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8936 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8937 that this is not confused with a 32-bit ARM breakpoint. */
8938 *kindptr = 3;
8939 }
8940
8941 /* Extract from an array REGBUF containing the (raw) register state a
8942 function return value of type TYPE, and copy that, in virtual
8943 format, into VALBUF. */
8944
8945 static void
8946 arm_extract_return_value (struct type *type, struct regcache *regs,
8947 gdb_byte *valbuf)
8948 {
8949 struct gdbarch *gdbarch = get_regcache_arch (regs);
8950 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8951
8952 if (TYPE_CODE_FLT == TYPE_CODE (type))
8953 {
8954 switch (gdbarch_tdep (gdbarch)->fp_model)
8955 {
8956 case ARM_FLOAT_FPA:
8957 {
8958 /* The value is in register F0 in internal format. We need to
8959 extract the raw value and then convert it to the desired
8960 internal type. */
8961 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8962
8963 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8964 convert_from_extended (floatformat_from_type (type), tmpbuf,
8965 valbuf, gdbarch_byte_order (gdbarch));
8966 }
8967 break;
8968
8969 case ARM_FLOAT_SOFT_FPA:
8970 case ARM_FLOAT_SOFT_VFP:
8971 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8972 not using the VFP ABI code. */
8973 case ARM_FLOAT_VFP:
8974 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8975 if (TYPE_LENGTH (type) > 4)
8976 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8977 valbuf + INT_REGISTER_SIZE);
8978 break;
8979
8980 default:
8981 internal_error (__FILE__, __LINE__,
8982 _("arm_extract_return_value: "
8983 "Floating point model not supported"));
8984 break;
8985 }
8986 }
8987 else if (TYPE_CODE (type) == TYPE_CODE_INT
8988 || TYPE_CODE (type) == TYPE_CODE_CHAR
8989 || TYPE_CODE (type) == TYPE_CODE_BOOL
8990 || TYPE_CODE (type) == TYPE_CODE_PTR
8991 || TYPE_CODE (type) == TYPE_CODE_REF
8992 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8993 {
8994 /* If the type is a plain integer, then the access is
8995 straight-forward. Otherwise we have to play around a bit
8996 more. */
8997 int len = TYPE_LENGTH (type);
8998 int regno = ARM_A1_REGNUM;
8999 ULONGEST tmp;
9000
9001 while (len > 0)
9002 {
9003 /* By using store_unsigned_integer we avoid having to do
9004 anything special for small big-endian values. */
9005 regcache_cooked_read_unsigned (regs, regno++, &tmp);
9006 store_unsigned_integer (valbuf,
9007 (len > INT_REGISTER_SIZE
9008 ? INT_REGISTER_SIZE : len),
9009 byte_order, tmp);
9010 len -= INT_REGISTER_SIZE;
9011 valbuf += INT_REGISTER_SIZE;
9012 }
9013 }
9014 else
9015 {
9016 /* For a structure or union the behaviour is as if the value had
9017 been stored to word-aligned memory and then loaded into
9018 registers with 32-bit load instruction(s). */
9019 int len = TYPE_LENGTH (type);
9020 int regno = ARM_A1_REGNUM;
9021 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9022
9023 while (len > 0)
9024 {
9025 regcache_cooked_read (regs, regno++, tmpbuf);
9026 memcpy (valbuf, tmpbuf,
9027 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9028 len -= INT_REGISTER_SIZE;
9029 valbuf += INT_REGISTER_SIZE;
9030 }
9031 }
9032 }
9033
9034
9035 /* Will a function return an aggregate type in memory or in a
9036 register? Return 0 if an aggregate type can be returned in a
9037 register, 1 if it must be returned in memory. */
9038
9039 static int
9040 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
9041 {
9042 int nRc;
9043 enum type_code code;
9044
9045 CHECK_TYPEDEF (type);
9046
9047 /* In the ARM ABI, "integer" like aggregate types are returned in
9048 registers. For an aggregate type to be integer like, its size
9049 must be less than or equal to INT_REGISTER_SIZE and the
9050 offset of each addressable subfield must be zero. Note that bit
9051 fields are not addressable, and all addressable subfields of
9052 unions always start at offset zero.
9053
9054 This function is based on the behaviour of GCC 2.95.1.
9055 See: gcc/arm.c: arm_return_in_memory() for details.
9056
9057 Note: All versions of GCC before GCC 2.95.2 do not set up the
9058 parameters correctly for a function returning the following
9059 structure: struct { float f;}; This should be returned in memory,
9060 not a register. Richard Earnshaw sent me a patch, but I do not
9061 know of any way to detect if a function like the above has been
9062 compiled with the correct calling convention. */
9063
9064 /* All aggregate types that won't fit in a register must be returned
9065 in memory. */
9066 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
9067 {
9068 return 1;
9069 }
9070
9071 /* The AAPCS says all aggregates not larger than a word are returned
9072 in a register. */
9073 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
9074 return 0;
9075
9076 /* The only aggregate types that can be returned in a register are
9077 structs and unions. Arrays must be returned in memory. */
9078 code = TYPE_CODE (type);
9079 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
9080 {
9081 return 1;
9082 }
9083
9084 /* Assume all other aggregate types can be returned in a register.
9085 Run a check for structures, unions and arrays. */
9086 nRc = 0;
9087
9088 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9089 {
9090 int i;
9091 /* Need to check if this struct/union is "integer" like. For
9092 this to be true, its size must be less than or equal to
9093 INT_REGISTER_SIZE and the offset of each addressable
9094 subfield must be zero. Note that bit fields are not
9095 addressable, and unions always start at offset zero. If any
9096 of the subfields is a floating point type, the struct/union
9097 cannot be an integer type. */
9098
9099 /* For each field in the object, check:
9100 1) Is it FP? --> yes, nRc = 1;
9101 2) Is it addressable (bitpos != 0) and
9102 not packed (bitsize == 0)?
9103 --> yes, nRc = 1
9104 */
9105
9106 for (i = 0; i < TYPE_NFIELDS (type); i++)
9107 {
9108 enum type_code field_type_code;
9109 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
9110 i)));
9111
9112 /* Is it a floating point type field? */
9113 if (field_type_code == TYPE_CODE_FLT)
9114 {
9115 nRc = 1;
9116 break;
9117 }
9118
9119 /* If bitpos != 0, then we have to care about it. */
9120 if (TYPE_FIELD_BITPOS (type, i) != 0)
9121 {
9122 /* Bitfields are not addressable. If the field bitsize is
9123 zero, then the field is not packed. Hence it cannot be
9124 a bitfield or any other packed type. */
9125 if (TYPE_FIELD_BITSIZE (type, i) == 0)
9126 {
9127 nRc = 1;
9128 break;
9129 }
9130 }
9131 }
9132 }
9133
9134 return nRc;
9135 }
9136
9137 /* Write into appropriate registers a function return value of type
9138 TYPE, given in virtual format. */
9139
static void
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = get_regcache_arch (regs);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE (type) == TYPE_CODE_FLT)
    {
      gdb_byte buf[MAX_REGISTER_SIZE];

      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  /* FPA returns floats in f0, converted to the FPA's internal
	     extended format.  */
	  convert_to_extended (floatformat_from_type (type), buf, valbuf,
			       gdbarch_byte_order (gdbarch));
	  regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Soft-float models return floats in the core registers:
	     r0, plus r1 for the second word of a double.  */
	  regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
				   valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_CODE (type) == TYPE_CODE_REF
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
	{
	  /* Values of one word or less are zero/sign-extended and
	     returned in r0.  */
	  bfd_byte tmpbuf[INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);

	  store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
	  regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
	}
      else
	{
	  /* Integral values greater than one word are stored in consecutive
	     registers starting with r0.  This will always be a multiple of
	     the register size.  */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;

	  while (len > 0)
	    {
	      regcache_cooked_write (regs, regno++, valbuf);
	      len -= INT_REGISTER_SIZE;
	      valbuf += INT_REGISTER_SIZE;
	    }
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  /* Copy through a scratch buffer so the final, possibly
	     partial, word never reads past the end of VALBUF.  */
	  memcpy (tmpbuf, valbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  regcache_cooked_write (regs, regno++, tmpbuf);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
9230
9231
9232 /* Handle function return values. */
9233
static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
		  struct type *valtype, struct regcache *regcache,
		  gdb_byte *readbuf, const gdb_byte *writebuf)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  int vfp_base_count;

  /* If the function follows the VFP (hard-float) calling convention
     and VALTYPE is a VFP co-processor register candidate, the value
     lives in VFP registers: s/d/q depending on the base type, one
     register per base-type unit.  */
  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
    {
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      int i;
      for (i = 0; i < vfp_base_count; i++)
	{
	  if (reg_char == 'q')
	    {
	      /* Quad registers are accessed via their two component
		 double registers.  */
	      if (writebuf)
		arm_neon_quad_write (gdbarch, regcache, i,
				     writebuf + i * unit_length);

	      if (readbuf)
		arm_neon_quad_read (gdbarch, regcache, i,
				    readbuf + i * unit_length);
	    }
	  else
	    {
	      char name_buf[4];
	      int regnum;

	      /* Map the register name ("s0", "d3", ...) to a register
		 number; s registers may be pseudo registers.  */
	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						    strlen (name_buf));
	      if (writebuf)
		regcache_cooked_write (regcache, regnum,
				       writebuf + i * unit_length);
	      if (readbuf)
		regcache_cooked_read (regcache, regnum,
				      readbuf + i * unit_length);
	    }
	}
      return RETURN_VALUE_REGISTER_CONVENTION;
    }

  /* Aggregates may have to be returned in memory, depending on the
     ABI and on arm_return_in_memory's analysis of the type.  */
  if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
      || TYPE_CODE (valtype) == TYPE_CODE_UNION
      || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
    {
      if (tdep->struct_return == pcc_struct_return
	  || arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }

  /* AAPCS returns complex types longer than a register in memory.  */
  if (tdep->arm_abi != ARM_ABI_APCS
      && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
      && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
    return RETURN_VALUE_STRUCT_CONVENTION;

  if (writebuf)
    arm_store_return_value (valtype, regcache, writebuf);

  if (readbuf)
    arm_extract_return_value (valtype, regcache, readbuf);

  return RETURN_VALUE_REGISTER_CONVENTION;
}
9304
9305
9306 static int
9307 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9308 {
9309 struct gdbarch *gdbarch = get_frame_arch (frame);
9310 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9311 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9312 CORE_ADDR jb_addr;
9313 gdb_byte buf[INT_REGISTER_SIZE];
9314
9315 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9316
9317 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9318 INT_REGISTER_SIZE))
9319 return 0;
9320
9321 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9322 return 1;
9323 }
9324
9325 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9326 return the target PC. Otherwise return 0. */
9327
9328 CORE_ADDR
9329 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9330 {
9331 const char *name;
9332 int namelen;
9333 CORE_ADDR start_addr;
9334
9335 /* Find the starting address and name of the function containing the PC. */
9336 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9337 {
9338 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9339 check here. */
9340 start_addr = arm_skip_bx_reg (frame, pc);
9341 if (start_addr != 0)
9342 return start_addr;
9343
9344 return 0;
9345 }
9346
9347 /* If PC is in a Thumb call or return stub, return the address of the
9348 target PC, which is in a register. The thunk functions are called
9349 _call_via_xx, where x is the register name. The possible names
9350 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9351 functions, named __ARM_call_via_r[0-7]. */
9352 if (strncmp (name, "_call_via_", 10) == 0
9353 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
9354 {
9355 /* Use the name suffix to determine which register contains the
9356 target PC. */
9357 static char *table[15] =
9358 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9359 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9360 };
9361 int regno;
9362 int offset = strlen (name) - 2;
9363
9364 for (regno = 0; regno <= 14; regno++)
9365 if (strcmp (&name[offset], table[regno]) == 0)
9366 return get_frame_register_unsigned (frame, regno);
9367 }
9368
9369 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9370 non-interworking calls to foo. We could decode the stubs
9371 to find the target but it's easier to use the symbol table. */
9372 namelen = strlen (name);
9373 if (name[0] == '_' && name[1] == '_'
9374 && ((namelen > 2 + strlen ("_from_thumb")
9375 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9376 strlen ("_from_thumb")) == 0)
9377 || (namelen > 2 + strlen ("_from_arm")
9378 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9379 strlen ("_from_arm")) == 0)))
9380 {
9381 char *target_name;
9382 int target_len = namelen - 2;
9383 struct bound_minimal_symbol minsym;
9384 struct objfile *objfile;
9385 struct obj_section *sec;
9386
9387 if (name[namelen - 1] == 'b')
9388 target_len -= strlen ("_from_thumb");
9389 else
9390 target_len -= strlen ("_from_arm");
9391
9392 target_name = alloca (target_len + 1);
9393 memcpy (target_name, name + 2, target_len);
9394 target_name[target_len] = '\0';
9395
9396 sec = find_pc_section (pc);
9397 objfile = (sec == NULL) ? NULL : sec->objfile;
9398 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9399 if (minsym.minsym != NULL)
9400 return BMSYMBOL_VALUE_ADDRESS (minsym);
9401 else
9402 return 0;
9403 }
9404
9405 return 0; /* not a stub */
9406 }
9407
9408 static void
9409 set_arm_command (char *args, int from_tty)
9410 {
9411 printf_unfiltered (_("\
9412 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9413 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9414 }
9415
/* Top-level "show arm" command: display the values of all "show arm"
   subcommands.  */

static void
show_arm_command (char *args, int from_tty)
{
  cmd_show_list (showarmcmdlist, from_tty, "");
}
9421
/* Re-select the target architecture after one of the global ARM
   settings (ABI, FP model, ...) has been changed by the user.  */

static void
arm_update_current_architecture (void)
{
  struct gdbarch_info info;

  /* If the current architecture is not ARM, we have nothing to do.  */
  if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
    return;

  /* Update the architecture.  An empty INFO forces re-selection based
     on the (possibly changed) global settings.  */
  gdbarch_info_init (&info);

  if (!gdbarch_update_p (info))
    internal_error (__FILE__, __LINE__, _("could not update architecture"));
}
9437
9438 static void
9439 set_fp_model_sfunc (char *args, int from_tty,
9440 struct cmd_list_element *c)
9441 {
9442 enum arm_float_model fp_model;
9443
9444 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9445 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9446 {
9447 arm_fp_model = fp_model;
9448 break;
9449 }
9450
9451 if (fp_model == ARM_FLOAT_LAST)
9452 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9453 current_fp_model);
9454
9455 arm_update_current_architecture ();
9456 }
9457
9458 static void
9459 show_fp_model (struct ui_file *file, int from_tty,
9460 struct cmd_list_element *c, const char *value)
9461 {
9462 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9463
9464 if (arm_fp_model == ARM_FLOAT_AUTO
9465 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9466 fprintf_filtered (file, _("\
9467 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9468 fp_model_strings[tdep->fp_model]);
9469 else
9470 fprintf_filtered (file, _("\
9471 The current ARM floating point model is \"%s\".\n"),
9472 fp_model_strings[arm_fp_model]);
9473 }
9474
9475 static void
9476 arm_set_abi (char *args, int from_tty,
9477 struct cmd_list_element *c)
9478 {
9479 enum arm_abi_kind arm_abi;
9480
9481 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9482 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9483 {
9484 arm_abi_global = arm_abi;
9485 break;
9486 }
9487
9488 if (arm_abi == ARM_ABI_LAST)
9489 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9490 arm_abi_string);
9491
9492 arm_update_current_architecture ();
9493 }
9494
9495 static void
9496 arm_show_abi (struct ui_file *file, int from_tty,
9497 struct cmd_list_element *c, const char *value)
9498 {
9499 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9500
9501 if (arm_abi_global == ARM_ABI_AUTO
9502 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9503 fprintf_filtered (file, _("\
9504 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9505 arm_abi_strings[tdep->arm_abi]);
9506 else
9507 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9508 arm_abi_string);
9509 }
9510
/* "show arm fallback-mode" handler: report the mode (arm/thumb/auto)
   assumed when no symbol information is available.  */

static void
arm_show_fallback_mode (struct ui_file *file, int from_tty,
			struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(when symbols are unavailable) is \"%s\".\n"),
		    arm_fallback_mode_string);
}
9520
9521 static void
9522 arm_show_force_mode (struct ui_file *file, int from_tty,
9523 struct cmd_list_element *c, const char *value)
9524 {
9525 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9526
9527 fprintf_filtered (file,
9528 _("The current execution mode assumed "
9529 "(even when symbols are available) is \"%s\".\n"),
9530 arm_force_mode_string);
9531 }
9532
9533 /* If the user changes the register disassembly style used for info
9534 register and other commands, we have to also switch the style used
9535 in opcodes for disassembly output. This function is run in the "set
9536 arm disassembly" command, and does that. */
9537
static void
set_disassembly_style_sfunc (char *args, int from_tty,
			     struct cmd_list_element *c)
{
  /* ARGS and FROM_TTY are unused; the chosen style has already been
     stored in the "disassembly_style" control variable by the time
     this set-function runs.  */
  set_disassembly_style ();
}
9544 \f
9545 /* Return the ARM register name corresponding to register I. */
9546 static const char *
9547 arm_register_name (struct gdbarch *gdbarch, int i)
9548 {
9549 const int num_regs = gdbarch_num_regs (gdbarch);
9550
9551 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9552 && i >= num_regs && i < num_regs + 32)
9553 {
9554 static const char *const vfp_pseudo_names[] = {
9555 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9556 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9557 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9558 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9559 };
9560
9561 return vfp_pseudo_names[i - num_regs];
9562 }
9563
9564 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9565 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9566 {
9567 static const char *const neon_pseudo_names[] = {
9568 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9569 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9570 };
9571
9572 return neon_pseudo_names[i - num_regs - 32];
9573 }
9574
9575 if (i >= ARRAY_SIZE (arm_register_names))
9576 /* These registers are only supported on targets which supply
9577 an XML description. */
9578 return "";
9579
9580 return arm_register_names[i];
9581 }
9582
9583 static void
9584 set_disassembly_style (void)
9585 {
9586 int current;
9587
9588 /* Find the style that the user wants. */
9589 for (current = 0; current < num_disassembly_options; current++)
9590 if (disassembly_style == valid_disassembly_styles[current])
9591 break;
9592 gdb_assert (current < num_disassembly_options);
9593
9594 /* Synchronize the disassembler. */
9595 set_arm_regname_option (current);
9596 }
9597
9598 /* Test whether the coff symbol specific value corresponds to a Thumb
9599 function. */
9600
9601 static int
9602 coff_sym_is_thumb (int val)
9603 {
9604 return (val == C_THUMBEXT
9605 || val == C_THUMBSTAT
9606 || val == C_THUMBEXTFUNC
9607 || val == C_THUMBSTATFUNC
9608 || val == C_THUMBLABEL);
9609 }
9610
9611 /* arm_coff_make_msymbol_special()
9612 arm_elf_make_msymbol_special()
9613
9614 These functions test whether the COFF or ELF symbol corresponds to
9615 an address in thumb code, and set a "special" bit in a minimal
9616 symbol to indicate that it does. */
9617
static void
arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
{
  /* If the ELF symbol's branch-type annotation says it refers to
     Thumb code, mark the minimal symbol as "special".  */
  if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
      == ST_BRANCH_TO_THUMB)
    MSYMBOL_SET_SPECIAL (msym);
}
9625
static void
arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
{
  /* VAL is the COFF symbol's storage class; the C_THUMB* classes
     identify Thumb symbols.  */
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
9632
/* Destructor for the per-objfile data: release the per-section
   mapping-symbol vectors.  DATA itself is obstack-allocated and is
   freed along with the objfile's obstack.  */

static void
arm_objfile_data_free (struct objfile *objfile, void *arg)
{
  struct arm_per_objfile *data = arg;
  unsigned int i;

  for (i = 0; i < objfile->obfd->section_count; i++)
    VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
}
9642
/* Record an ARM mapping symbol ($a, $t or $d) from OBJFILE's symbol
   table into a per-section vector sorted by symbol value, for later
   ARM/Thumb/data classification of addresses.  Other special symbols
   are ignored.  */

static void
arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
			   asymbol *sym)
{
  const char *name = bfd_asymbol_name (sym);
  struct arm_per_objfile *data;
  VEC(arm_mapping_symbol_s) **map_p;
  struct arm_mapping_symbol new_map_sym;

  gdb_assert (name[0] == '$');
  if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
    return;

  /* Lazily create the per-objfile data (one vector pointer per BFD
     section) the first time a mapping symbol is seen.  */
  data = objfile_data (objfile, arm_objfile_data_key);
  if (data == NULL)
    {
      data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
			     struct arm_per_objfile);
      set_objfile_data (objfile, arm_objfile_data_key, data);
      data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
					   objfile->obfd->section_count,
					   VEC(arm_mapping_symbol_s) *);
    }
  map_p = &data->section_maps[bfd_get_section (sym)->index];

  new_map_sym.value = sym->value;
  new_map_sym.type = name[1];

  /* Assume that most mapping symbols appear in order of increasing
     value.  If they were randomly distributed, it would be faster to
     always push here and then sort at first use.  */
  if (!VEC_empty (arm_mapping_symbol_s, *map_p))
    {
      struct arm_mapping_symbol *prev_map_sym;

      prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
      if (prev_map_sym->value >= sym->value)
	{
	  /* Out-of-order symbol: insert it at the position that keeps
	     the vector sorted by value.  */
	  unsigned int idx;
	  idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
				 arm_compare_mapping_symbols);
	  VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
	  return;
	}
    }

  VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
}
9691
9692 static void
9693 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9694 {
9695 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9696 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9697
9698 /* If necessary, set the T bit. */
9699 if (arm_apcs_32)
9700 {
9701 ULONGEST val, t_bit;
9702 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9703 t_bit = arm_psr_thumb_bit (gdbarch);
9704 if (arm_pc_is_thumb (gdbarch, pc))
9705 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9706 val | t_bit);
9707 else
9708 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9709 val & ~t_bit);
9710 }
9711 }
9712
9713 /* Read the contents of a NEON quad register, by reading from two
9714 double registers. This is used to implement the quad pseudo
9715 registers, and for argument passing in case the quad registers are
9716 missing; vectors are passed in quad registers when using the VFP
9717 ABI, even if a NEON unit is not present. REGNUM is the index of
9718 the quad register, in [0, 15]. */
9719
9720 static enum register_status
9721 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9722 int regnum, gdb_byte *buf)
9723 {
9724 char name_buf[4];
9725 gdb_byte reg_buf[8];
9726 int offset, double_regnum;
9727 enum register_status status;
9728
9729 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9730 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9731 strlen (name_buf));
9732
9733 /* d0 is always the least significant half of q0. */
9734 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9735 offset = 8;
9736 else
9737 offset = 0;
9738
9739 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9740 if (status != REG_VALID)
9741 return status;
9742 memcpy (buf + offset, reg_buf, 8);
9743
9744 offset = 8 - offset;
9745 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9746 if (status != REG_VALID)
9747 return status;
9748 memcpy (buf + offset, reg_buf, 8);
9749
9750 return REG_VALID;
9751 }
9752
/* Read pseudo register REGNUM into BUF: q0-q15 are synthesized from
   pairs of double registers, s0-s31 from halves of double
   registers.  */

static enum register_status
arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
		 int regnum, gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  /* Rebase REGNUM so 0-31 are s registers and 32-47 are q registers.  */
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
  else
    {
      enum register_status status;

      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      /* Read the containing double register d<regnum/2> and extract
	 the relevant 4-byte half.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      status = regcache_raw_read (regcache, double_regnum, reg_buf);
      if (status == REG_VALID)
	memcpy (buf, reg_buf + offset, 4);
      return status;
    }
}
9791
9792 /* Store the contents of BUF to a NEON quad register, by writing to
9793 two double registers. This is used to implement the quad pseudo
9794 registers, and for argument passing in case the quad registers are
9795 missing; vectors are passed in quad registers when using the VFP
9796 ABI, even if a NEON unit is not present. REGNUM is the index
9797 of the quad register, in [0, 15]. */
9798
9799 static void
9800 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9801 int regnum, const gdb_byte *buf)
9802 {
9803 char name_buf[4];
9804 int offset, double_regnum;
9805
9806 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9807 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9808 strlen (name_buf));
9809
9810 /* d0 is always the least significant half of q0. */
9811 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9812 offset = 8;
9813 else
9814 offset = 0;
9815
9816 regcache_raw_write (regcache, double_regnum, buf + offset);
9817 offset = 8 - offset;
9818 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9819 }
9820
/* Write BUF to pseudo register REGNUM: q0-q15 are stored via pairs of
   double registers, s0-s31 via a read-modify-write of half of a
   double register.  */

static void
arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
		  int regnum, const gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  /* Rebase REGNUM so 0-31 are s registers and 32-47 are q registers.  */
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
  else
    {
      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      /* Read-modify-write the containing double register d<regnum/2>
	 so the other half is preserved.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      regcache_raw_read (regcache, double_regnum, reg_buf);
      memcpy (reg_buf + offset, buf, 4);
      regcache_raw_write (regcache, double_regnum, reg_buf);
    }
}
9856
/* Return the value of the register whose number BATON points to, in
   FRAME.  Used as the read callback for ARM user-register aliases.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  const int *reg_p = baton;
  return value_of_register (*reg_p, frame);
}
9863 \f
/* OS/ABI sniffer for ARM ELF files: for the ELFOSABI_ARM value used
   by GNU tools, scan the note sections to determine the OS ABI.  */

static enum gdb_osabi
arm_elf_osabi_sniffer (bfd *abfd)
{
  unsigned int elfosabi;
  enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;

  elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];

  if (elfosabi == ELFOSABI_ARM)
    /* GNU tools use this value.  Check note sections in this case,
       as well.  */
    bfd_map_over_sections (abfd,
			   generic_elf_osabi_sniff_abi_tag_sections,
			   &osabi);

  /* Anything else will be handled by the generic ELF sniffer.  */
  return osabi;
}
9882
9883 static int
9884 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9885 struct reggroup *group)
9886 {
9887 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9888 this, FPS register belongs to save_regroup, restore_reggroup, and
9889 all_reggroup, of course. */
9890 if (regnum == ARM_FPS_REGNUM)
9891 return (group == float_reggroup
9892 || group == save_reggroup
9893 || group == restore_reggroup
9894 || group == all_reggroup);
9895 else
9896 return default_register_reggroup_p (gdbarch, regnum, group);
9897 }
9898
9899 \f
9900 /* For backward-compatibility we allow two 'g' packet lengths with
9901 the remote protocol depending on whether FPA registers are
9902 supplied. M-profile targets do not have FPA registers, but some
9903 stubs already exist in the wild which use a 'g' packet which
9904 supplies them albeit with dummy values. The packet format which
9905 includes FPA registers should be considered deprecated for
9906 M-profile targets. */
9907
static void
arm_register_g_packet_guesses (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    {
      /* If we know from the executable this is an M-profile target,
	 cater for remote targets whose register set layout is the
	 same as the FPA layout.  Each guess maps a 'g' packet byte
	 count to the target description to use for it.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + (8 * FP_REGISTER_SIZE)
				      + (2 * INT_REGISTER_SIZE),
				      tdesc_arm_with_m_fpa_layout);

      /* The regular M-profile layout.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + INT_REGISTER_SIZE,
				      tdesc_arm_with_m);

      /* M-profile plus M4F VFP.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + (16 * VFP_REGISTER_SIZE)
				      + (2 * INT_REGISTER_SIZE),
				      tdesc_arm_with_m_vfp_d16);
    }

  /* Otherwise we don't have a useful guess.  */
}
9941
9942 \f
9943 /* Initialize the current architecture based on INFO. If possible,
9944 re-use an architecture from ARCHES, which is a list of
9945 architectures already created during this debugging session.
9946
9947 Called e.g. at program startup, when reading a core file, and when
9948 reading a binary file. */
9949
9950 static struct gdbarch *
9951 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9952 {
9953 struct gdbarch_tdep *tdep;
9954 struct gdbarch *gdbarch;
9955 struct gdbarch_list *best_arch;
9956 enum arm_abi_kind arm_abi = arm_abi_global;
9957 enum arm_float_model fp_model = arm_fp_model;
9958 struct tdesc_arch_data *tdesc_data = NULL;
9959 int i, is_m = 0;
9960 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9961 int have_neon = 0;
9962 int have_fpa_registers = 1;
9963 const struct target_desc *tdesc = info.target_desc;
9964
9965 /* If we have an object to base this architecture on, try to determine
9966 its ABI. */
9967
9968 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9969 {
9970 int ei_osabi, e_flags;
9971
9972 switch (bfd_get_flavour (info.abfd))
9973 {
9974 case bfd_target_aout_flavour:
9975 /* Assume it's an old APCS-style ABI. */
9976 arm_abi = ARM_ABI_APCS;
9977 break;
9978
9979 case bfd_target_coff_flavour:
9980 /* Assume it's an old APCS-style ABI. */
9981 /* XXX WinCE? */
9982 arm_abi = ARM_ABI_APCS;
9983 break;
9984
9985 case bfd_target_elf_flavour:
9986 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9987 e_flags = elf_elfheader (info.abfd)->e_flags;
9988
9989 if (ei_osabi == ELFOSABI_ARM)
9990 {
9991 /* GNU tools used to use this value, but do not for EABI
9992 objects. There's nowhere to tag an EABI version
9993 anyway, so assume APCS. */
9994 arm_abi = ARM_ABI_APCS;
9995 }
9996 else if (ei_osabi == ELFOSABI_NONE)
9997 {
9998 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9999 int attr_arch, attr_profile;
10000
10001 switch (eabi_ver)
10002 {
10003 case EF_ARM_EABI_UNKNOWN:
10004 /* Assume GNU tools. */
10005 arm_abi = ARM_ABI_APCS;
10006 break;
10007
10008 case EF_ARM_EABI_VER4:
10009 case EF_ARM_EABI_VER5:
10010 arm_abi = ARM_ABI_AAPCS;
10011 /* EABI binaries default to VFP float ordering.
10012 They may also contain build attributes that can
10013 be used to identify if the VFP argument-passing
10014 ABI is in use. */
10015 if (fp_model == ARM_FLOAT_AUTO)
10016 {
10017 #ifdef HAVE_ELF
10018 switch (bfd_elf_get_obj_attr_int (info.abfd,
10019 OBJ_ATTR_PROC,
10020 Tag_ABI_VFP_args))
10021 {
10022 case 0:
10023 /* "The user intended FP parameter/result
10024 passing to conform to AAPCS, base
10025 variant". */
10026 fp_model = ARM_FLOAT_SOFT_VFP;
10027 break;
10028 case 1:
10029 /* "The user intended FP parameter/result
10030 passing to conform to AAPCS, VFP
10031 variant". */
10032 fp_model = ARM_FLOAT_VFP;
10033 break;
10034 case 2:
10035 /* "The user intended FP parameter/result
10036 passing to conform to tool chain-specific
10037 conventions" - we don't know any such
10038 conventions, so leave it as "auto". */
10039 break;
10040 default:
10041 /* Attribute value not mentioned in the
10042 October 2008 ABI, so leave it as
10043 "auto". */
10044 break;
10045 }
10046 #else
10047 fp_model = ARM_FLOAT_SOFT_VFP;
10048 #endif
10049 }
10050 break;
10051
10052 default:
10053 /* Leave it as "auto". */
10054 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10055 break;
10056 }
10057
10058 #ifdef HAVE_ELF
10059 /* Detect M-profile programs. This only works if the
10060 executable file includes build attributes; GCC does
10061 copy them to the executable, but e.g. RealView does
10062 not. */
10063 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10064 Tag_CPU_arch);
10065 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10066 OBJ_ATTR_PROC,
10067 Tag_CPU_arch_profile);
10068 /* GCC specifies the profile for v6-M; RealView only
10069 specifies the profile for architectures starting with
10070 V7 (as opposed to architectures with a tag
10071 numerically greater than TAG_CPU_ARCH_V7). */
10072 if (!tdesc_has_registers (tdesc)
10073 && (attr_arch == TAG_CPU_ARCH_V6_M
10074 || attr_arch == TAG_CPU_ARCH_V6S_M
10075 || attr_profile == 'M'))
10076 is_m = 1;
10077 #endif
10078 }
10079
10080 if (fp_model == ARM_FLOAT_AUTO)
10081 {
10082 int e_flags = elf_elfheader (info.abfd)->e_flags;
10083
10084 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10085 {
10086 case 0:
10087 /* Leave it as "auto". Strictly speaking this case
10088 means FPA, but almost nobody uses that now, and
10089 many toolchains fail to set the appropriate bits
10090 for the floating-point model they use. */
10091 break;
10092 case EF_ARM_SOFT_FLOAT:
10093 fp_model = ARM_FLOAT_SOFT_FPA;
10094 break;
10095 case EF_ARM_VFP_FLOAT:
10096 fp_model = ARM_FLOAT_VFP;
10097 break;
10098 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10099 fp_model = ARM_FLOAT_SOFT_VFP;
10100 break;
10101 }
10102 }
10103
10104 if (e_flags & EF_ARM_BE8)
10105 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10106
10107 break;
10108
10109 default:
10110 /* Leave it as "auto". */
10111 break;
10112 }
10113 }
10114
10115 /* Check any target description for validity. */
10116 if (tdesc_has_registers (tdesc))
10117 {
10118 /* For most registers we require GDB's default names; but also allow
10119 the numeric names for sp / lr / pc, as a convenience. */
10120 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10121 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10122 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10123
10124 const struct tdesc_feature *feature;
10125 int valid_p;
10126
10127 feature = tdesc_find_feature (tdesc,
10128 "org.gnu.gdb.arm.core");
10129 if (feature == NULL)
10130 {
10131 feature = tdesc_find_feature (tdesc,
10132 "org.gnu.gdb.arm.m-profile");
10133 if (feature == NULL)
10134 return NULL;
10135 else
10136 is_m = 1;
10137 }
10138
10139 tdesc_data = tdesc_data_alloc ();
10140
10141 valid_p = 1;
10142 for (i = 0; i < ARM_SP_REGNUM; i++)
10143 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10144 arm_register_names[i]);
10145 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10146 ARM_SP_REGNUM,
10147 arm_sp_names);
10148 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10149 ARM_LR_REGNUM,
10150 arm_lr_names);
10151 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10152 ARM_PC_REGNUM,
10153 arm_pc_names);
10154 if (is_m)
10155 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10156 ARM_PS_REGNUM, "xpsr");
10157 else
10158 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10159 ARM_PS_REGNUM, "cpsr");
10160
10161 if (!valid_p)
10162 {
10163 tdesc_data_cleanup (tdesc_data);
10164 return NULL;
10165 }
10166
10167 feature = tdesc_find_feature (tdesc,
10168 "org.gnu.gdb.arm.fpa");
10169 if (feature != NULL)
10170 {
10171 valid_p = 1;
10172 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10173 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10174 arm_register_names[i]);
10175 if (!valid_p)
10176 {
10177 tdesc_data_cleanup (tdesc_data);
10178 return NULL;
10179 }
10180 }
10181 else
10182 have_fpa_registers = 0;
10183
10184 feature = tdesc_find_feature (tdesc,
10185 "org.gnu.gdb.xscale.iwmmxt");
10186 if (feature != NULL)
10187 {
10188 static const char *const iwmmxt_names[] = {
10189 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10190 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10191 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10192 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10193 };
10194
10195 valid_p = 1;
10196 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10197 valid_p
10198 &= tdesc_numbered_register (feature, tdesc_data, i,
10199 iwmmxt_names[i - ARM_WR0_REGNUM]);
10200
10201 /* Check for the control registers, but do not fail if they
10202 are missing. */
10203 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10204 tdesc_numbered_register (feature, tdesc_data, i,
10205 iwmmxt_names[i - ARM_WR0_REGNUM]);
10206
10207 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10208 valid_p
10209 &= tdesc_numbered_register (feature, tdesc_data, i,
10210 iwmmxt_names[i - ARM_WR0_REGNUM]);
10211
10212 if (!valid_p)
10213 {
10214 tdesc_data_cleanup (tdesc_data);
10215 return NULL;
10216 }
10217 }
10218
10219 /* If we have a VFP unit, check whether the single precision registers
10220 are present. If not, then we will synthesize them as pseudo
10221 registers. */
10222 feature = tdesc_find_feature (tdesc,
10223 "org.gnu.gdb.arm.vfp");
10224 if (feature != NULL)
10225 {
10226 static const char *const vfp_double_names[] = {
10227 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10228 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10229 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10230 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10231 };
10232
10233 /* Require the double precision registers. There must be either
10234 16 or 32. */
10235 valid_p = 1;
10236 for (i = 0; i < 32; i++)
10237 {
10238 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10239 ARM_D0_REGNUM + i,
10240 vfp_double_names[i]);
10241 if (!valid_p)
10242 break;
10243 }
10244 if (!valid_p && i == 16)
10245 valid_p = 1;
10246
10247 /* Also require FPSCR. */
10248 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10249 ARM_FPSCR_REGNUM, "fpscr");
10250 if (!valid_p)
10251 {
10252 tdesc_data_cleanup (tdesc_data);
10253 return NULL;
10254 }
10255
10256 if (tdesc_unnumbered_register (feature, "s0") == 0)
10257 have_vfp_pseudos = 1;
10258
10259 have_vfp_registers = 1;
10260
10261 /* If we have VFP, also check for NEON. The architecture allows
10262 NEON without VFP (integer vector operations only), but GDB
10263 does not support that. */
10264 feature = tdesc_find_feature (tdesc,
10265 "org.gnu.gdb.arm.neon");
10266 if (feature != NULL)
10267 {
10268 /* NEON requires 32 double-precision registers. */
10269 if (i != 32)
10270 {
10271 tdesc_data_cleanup (tdesc_data);
10272 return NULL;
10273 }
10274
10275 /* If there are quad registers defined by the stub, use
10276 their type; otherwise (normally) provide them with
10277 the default type. */
10278 if (tdesc_unnumbered_register (feature, "q0") == 0)
10279 have_neon_pseudos = 1;
10280
10281 have_neon = 1;
10282 }
10283 }
10284 }
10285
10286 /* If there is already a candidate, use it. */
10287 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10288 best_arch != NULL;
10289 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10290 {
10291 if (arm_abi != ARM_ABI_AUTO
10292 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10293 continue;
10294
10295 if (fp_model != ARM_FLOAT_AUTO
10296 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10297 continue;
10298
10299 /* There are various other properties in tdep that we do not
10300 need to check here: those derived from a target description,
10301 since gdbarches with a different target description are
10302 automatically disqualified. */
10303
10304 /* Do check is_m, though, since it might come from the binary. */
10305 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10306 continue;
10307
10308 /* Found a match. */
10309 break;
10310 }
10311
10312 if (best_arch != NULL)
10313 {
10314 if (tdesc_data != NULL)
10315 tdesc_data_cleanup (tdesc_data);
10316 return best_arch->gdbarch;
10317 }
10318
10319 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
10320 gdbarch = gdbarch_alloc (&info, tdep);
10321
10322 /* Record additional information about the architecture we are defining.
10323 These are gdbarch discriminators, like the OSABI. */
10324 tdep->arm_abi = arm_abi;
10325 tdep->fp_model = fp_model;
10326 tdep->is_m = is_m;
10327 tdep->have_fpa_registers = have_fpa_registers;
10328 tdep->have_vfp_registers = have_vfp_registers;
10329 tdep->have_vfp_pseudos = have_vfp_pseudos;
10330 tdep->have_neon_pseudos = have_neon_pseudos;
10331 tdep->have_neon = have_neon;
10332
10333 arm_register_g_packet_guesses (gdbarch);
10334
10335 /* Breakpoints. */
10336 switch (info.byte_order_for_code)
10337 {
10338 case BFD_ENDIAN_BIG:
10339 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10340 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10341 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10342 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10343
10344 break;
10345
10346 case BFD_ENDIAN_LITTLE:
10347 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10348 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10349 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10350 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10351
10352 break;
10353
10354 default:
10355 internal_error (__FILE__, __LINE__,
10356 _("arm_gdbarch_init: bad byte order for float format"));
10357 }
10358
10359 /* On ARM targets char defaults to unsigned. */
10360 set_gdbarch_char_signed (gdbarch, 0);
10361
10362 /* Note: for displaced stepping, this includes the breakpoint, and one word
10363 of additional scratch space. This setting isn't used for anything beside
10364 displaced stepping at present. */
10365 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10366
10367 /* This should be low enough for everything. */
10368 tdep->lowest_pc = 0x20;
10369 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10370
10371 /* The default, for both APCS and AAPCS, is to return small
10372 structures in registers. */
10373 tdep->struct_return = reg_struct_return;
10374
10375 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10376 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10377
10378 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10379
10380 /* Frame handling. */
10381 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10382 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10383 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10384
10385 frame_base_set_default (gdbarch, &arm_normal_base);
10386
10387 /* Address manipulation. */
10388 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10389
10390 /* Advance PC across function entry code. */
10391 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10392
10393 /* Detect whether PC is in function epilogue. */
10394 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
10395
10396 /* Skip trampolines. */
10397 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10398
10399 /* The stack grows downward. */
10400 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10401
10402 /* Breakpoint manipulation. */
10403 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10404 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10405 arm_remote_breakpoint_from_pc);
10406
10407 /* Information about registers, etc. */
10408 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10409 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10410 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10411 set_gdbarch_register_type (gdbarch, arm_register_type);
10412 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10413
10414 /* This "info float" is FPA-specific. Use the generic version if we
10415 do not have FPA. */
10416 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10417 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10418
10419 /* Internal <-> external register number maps. */
10420 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10421 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10422
10423 set_gdbarch_register_name (gdbarch, arm_register_name);
10424
10425 /* Returning results. */
10426 set_gdbarch_return_value (gdbarch, arm_return_value);
10427
10428 /* Disassembly. */
10429 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10430
10431 /* Minsymbol frobbing. */
10432 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10433 set_gdbarch_coff_make_msymbol_special (gdbarch,
10434 arm_coff_make_msymbol_special);
10435 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10436
10437 /* Thumb-2 IT block support. */
10438 set_gdbarch_adjust_breakpoint_address (gdbarch,
10439 arm_adjust_breakpoint_address);
10440
10441 /* Virtual tables. */
10442 set_gdbarch_vbit_in_delta (gdbarch, 1);
10443
10444 /* Hook in the ABI-specific overrides, if they have been registered. */
10445 gdbarch_init_osabi (info, gdbarch);
10446
10447 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10448
10449 /* Add some default predicates. */
10450 if (is_m)
10451 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10452 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10453 dwarf2_append_unwinders (gdbarch);
10454 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10455 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10456
10457 /* Now we have tuned the configuration, set a few final things,
10458 based on what the OS ABI has told us. */
10459
10460 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10461 binaries are always marked. */
10462 if (tdep->arm_abi == ARM_ABI_AUTO)
10463 tdep->arm_abi = ARM_ABI_APCS;
10464
10465 /* Watchpoints are not steppable. */
10466 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10467
10468 /* We used to default to FPA for generic ARM, but almost nobody
10469 uses that now, and we now provide a way for the user to force
10470 the model. So default to the most useful variant. */
10471 if (tdep->fp_model == ARM_FLOAT_AUTO)
10472 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10473
10474 if (tdep->jb_pc >= 0)
10475 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10476
10477 /* Floating point sizes and format. */
10478 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10479 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10480 {
10481 set_gdbarch_double_format
10482 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10483 set_gdbarch_long_double_format
10484 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10485 }
10486 else
10487 {
10488 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10489 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10490 }
10491
10492 if (have_vfp_pseudos)
10493 {
10494 /* NOTE: These are the only pseudo registers used by
10495 the ARM target at the moment. If more are added, a
10496 little more care in numbering will be needed. */
10497
10498 int num_pseudos = 32;
10499 if (have_neon_pseudos)
10500 num_pseudos += 16;
10501 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10502 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10503 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10504 }
10505
10506 if (tdesc_data)
10507 {
10508 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10509
10510 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10511
10512 /* Override tdesc_register_type to adjust the types of VFP
10513 registers for NEON. */
10514 set_gdbarch_register_type (gdbarch, arm_register_type);
10515 }
10516
  /* Add standard register aliases.  We add aliases even for those
     names which are used by the current architecture - it's simpler,
     and does no harm, since nothing ever lists user registers.  */
10520 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10521 user_reg_add (gdbarch, arm_register_aliases[i].name,
10522 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10523
10524 return gdbarch;
10525 }
10526
10527 static void
10528 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10529 {
10530 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10531
10532 if (tdep == NULL)
10533 return;
10534
10535 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10536 (unsigned long) tdep->lowest_pc);
10537 }
10538
10539 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10540
10541 void
10542 _initialize_arm_tdep (void)
10543 {
10544 struct ui_file *stb;
10545 long length;
10546 struct cmd_list_element *new_set, *new_show;
10547 const char *setname;
10548 const char *setdesc;
10549 const char *const *regnames;
10550 int numregs, i, j;
10551 static char *helptext;
10552 char regdesc[1024], *rdptr = regdesc;
10553 size_t rest = sizeof (regdesc);
10554
10555 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10556
10557 arm_objfile_data_key
10558 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10559
10560 /* Add ourselves to objfile event chain. */
10561 observer_attach_new_objfile (arm_exidx_new_objfile);
10562 arm_exidx_data_key
10563 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10564
10565 /* Register an ELF OS ABI sniffer for ARM binaries. */
10566 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10567 bfd_target_elf_flavour,
10568 arm_elf_osabi_sniffer);
10569
10570 /* Initialize the standard target descriptions. */
10571 initialize_tdesc_arm_with_m ();
10572 initialize_tdesc_arm_with_m_fpa_layout ();
10573 initialize_tdesc_arm_with_m_vfp_d16 ();
10574 initialize_tdesc_arm_with_iwmmxt ();
10575 initialize_tdesc_arm_with_vfpv2 ();
10576 initialize_tdesc_arm_with_vfpv3 ();
10577 initialize_tdesc_arm_with_neon ();
10578
10579 /* Get the number of possible sets of register names defined in opcodes. */
10580 num_disassembly_options = get_arm_regname_num_options ();
10581
10582 /* Add root prefix command for all "set arm"/"show arm" commands. */
10583 add_prefix_cmd ("arm", no_class, set_arm_command,
10584 _("Various ARM-specific commands."),
10585 &setarmcmdlist, "set arm ", 0, &setlist);
10586
10587 add_prefix_cmd ("arm", no_class, show_arm_command,
10588 _("Various ARM-specific commands."),
10589 &showarmcmdlist, "show arm ", 0, &showlist);
10590
10591 /* Sync the opcode insn printer with our register viewer. */
10592 parse_arm_disassembler_option ("reg-names-std");
10593
10594 /* Initialize the array that will be passed to
10595 add_setshow_enum_cmd(). */
10596 valid_disassembly_styles
10597 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10598 for (i = 0; i < num_disassembly_options; i++)
10599 {
10600 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
10601 valid_disassembly_styles[i] = setname;
10602 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10603 rdptr += length;
10604 rest -= length;
10605 /* When we find the default names, tell the disassembler to use
10606 them. */
10607 if (!strcmp (setname, "std"))
10608 {
10609 disassembly_style = setname;
10610 set_arm_regname_option (i);
10611 }
10612 }
10613 /* Mark the end of valid options. */
10614 valid_disassembly_styles[num_disassembly_options] = NULL;
10615
10616 /* Create the help text. */
10617 stb = mem_fileopen ();
10618 fprintf_unfiltered (stb, "%s%s%s",
10619 _("The valid values are:\n"),
10620 regdesc,
10621 _("The default is \"std\"."));
10622 helptext = ui_file_xstrdup (stb, NULL);
10623 ui_file_delete (stb);
10624
10625 add_setshow_enum_cmd("disassembler", no_class,
10626 valid_disassembly_styles, &disassembly_style,
10627 _("Set the disassembly style."),
10628 _("Show the disassembly style."),
10629 helptext,
10630 set_disassembly_style_sfunc,
10631 NULL, /* FIXME: i18n: The disassembly style is
10632 \"%s\". */
10633 &setarmcmdlist, &showarmcmdlist);
10634
10635 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10636 _("Set usage of ARM 32-bit mode."),
10637 _("Show usage of ARM 32-bit mode."),
10638 _("When off, a 26-bit PC will be used."),
10639 NULL,
10640 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10641 mode is %s. */
10642 &setarmcmdlist, &showarmcmdlist);
10643
10644 /* Add a command to allow the user to force the FPU model. */
10645 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10646 _("Set the floating point type."),
10647 _("Show the floating point type."),
10648 _("auto - Determine the FP typefrom the OS-ABI.\n\
10649 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10650 fpa - FPA co-processor (GCC compiled).\n\
10651 softvfp - Software FP with pure-endian doubles.\n\
10652 vfp - VFP co-processor."),
10653 set_fp_model_sfunc, show_fp_model,
10654 &setarmcmdlist, &showarmcmdlist);
10655
10656 /* Add a command to allow the user to force the ABI. */
10657 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10658 _("Set the ABI."),
10659 _("Show the ABI."),
10660 NULL, arm_set_abi, arm_show_abi,
10661 &setarmcmdlist, &showarmcmdlist);
10662
10663 /* Add two commands to allow the user to force the assumed
10664 execution mode. */
10665 add_setshow_enum_cmd ("fallback-mode", class_support,
10666 arm_mode_strings, &arm_fallback_mode_string,
10667 _("Set the mode assumed when symbols are unavailable."),
10668 _("Show the mode assumed when symbols are unavailable."),
10669 NULL, NULL, arm_show_fallback_mode,
10670 &setarmcmdlist, &showarmcmdlist);
10671 add_setshow_enum_cmd ("force-mode", class_support,
10672 arm_mode_strings, &arm_force_mode_string,
10673 _("Set the mode assumed even when symbols are available."),
10674 _("Show the mode assumed even when symbols are available."),
10675 NULL, NULL, arm_show_force_mode,
10676 &setarmcmdlist, &showarmcmdlist);
10677
10678 /* Debugging flag. */
10679 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10680 _("Set ARM debugging."),
10681 _("Show ARM debugging."),
10682 _("When on, arm-specific debugging is enabled."),
10683 NULL,
10684 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10685 &setdebuglist, &showdebuglist);
10686 }
10687
/* ARM-reversible process record data structures.  */

/* Instruction widths, in bytes, for the three encodings the recorder
   has to handle.  */
#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

/* Copy LENGTH register numbers from RECORD_BUF into a freshly
   XNEWVEC-allocated uint32_t array assigned to REGS.  REGS is left
   untouched when LENGTH is zero; the caller owns the allocation.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
              } \
          } \
        while (0)

/* Copy LENGTH arm_mem_r entries (flattened as len/addr uint32_t pairs
   in RECORD_BUF) into a freshly XNEWVEC-allocated array assigned to
   MEMS.  MEMS is left untouched when LENGTH is zero; the caller owns
   the allocation.  NOTE(review): the destination is spelled
   "&MEMS->len", which is the address of the first member and therefore
   equivalent to &MEMS[0]; it relies on LEN being the leading field of
   struct arm_mem_r.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
            { \
                MEMS =  XNEWVEC (struct arm_mem_r, mem_len);  \
                memcpy(&MEMS->len, &RECORD_BUF[0], \
                       sizeof(struct arm_mem_r) * LENGTH); \
            } \
          } \
          while (0)

/* Checks whether insn is already recorded or yet to be decoded
   (boolean expression): true once any register or memory record has
   been emitted for ARM_RECORD.  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10727
/* ARM memory record structure: one contiguous region of memory whose
   pre-execution contents must be saved so the store can be undone.  */
struct arm_mem_r
{
  uint32_t len;    /* Record length.  */
  uint32_t addr;   /* Memory address.  */
};

/* ARM instruction record contains opcode of current insn
   and execution state (before entry to decode_insn()),
   contains list of to-be-modified registers and
   memory blocks (on return from decode_insn()).
   NOTE(review): arm_regs and arm_mems are heap-allocated by the
   REG_ALLOC / MEM_ALLOC macros above; presumably released by the
   record machinery's cleanup path -- confirm against callers.  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;
  struct regcache *regcache;
  CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
  uint32_t arm_insn;            /* Should accommodate thumb.  */
  uint32_t cond;                /* Condition code.  */
  uint32_t opcode;              /* Insn opcode.  */
  uint32_t decode;              /* Insn decode bits.  */
  uint32_t mem_rec_count;       /* No of mem records.  */
  uint32_t reg_rec_count;       /* No of reg records.  */
  uint32_t *arm_regs;           /* Registers to be saved for this record.  */
  struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
} insn_decode_record;
10754
10755
/* Checks ARM SBZ and SBO mandatory fields.

   INSN is the instruction word, BIT_NUM the 1-based position of the
   field's lowest bit, LEN the field width in bits.  If SBO is non-zero
   the field is "should be one" (every bit must be set); otherwise it
   is "should be zero" (every bit must be clear).  Return 1 when the
   field satisfies the constraint, 0 otherwise.  A zero-length field
   trivially succeeds.

   Fixed relative to the previous version: the old loop tested
   "ones & sbo", so with SBO == 0 the mask was always zero and every
   non-trivial SBZ check failed; it also missed leading zero bits of
   an SBO field.  Comparing the extracted field against the full mask
   handles both cases.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t mask, field;

  if (!len)
    return 1;

  /* Mask of LEN ones; guard the shift, as 1u << 32 is undefined.  */
  mask = (len < 32) ? ((1u << len) - 1u) : 0xffffffffu;
  /* BIT_NUM is 1-based, matching the original bits () call.  */
  field = (insn >> (bit_num - 1)) & mask;

  return sbo ? field == mask : field == 0;
}
10779
/* Result codes returned by the ARM process-record helpers.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Misc-store flavour handled by arm_record_strx: halfword (STRH) or
   doubleword (STRD).  */
typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;

/* Instruction set of the insn being recorded: 32-bit ARM, 16-bit
   Thumb, or 32-bit Thumb-2 encoding.  */
typedef enum
{
  ARM_RECORD=1,
  THUMB_RECORD,
  THUMB2_RECORD
} record_type_t;
10798
10799
10800 static int
10801 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10802 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10803 {
10804
10805 struct regcache *reg_cache = arm_insn_r->regcache;
10806 ULONGEST u_regval[2]= {0};
10807
10808 uint32_t reg_src1 = 0, reg_src2 = 0;
10809 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10810 uint32_t opcode1 = 0;
10811
10812 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10813 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10814 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10815
10816
10817 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10818 {
10819 /* 1) Handle misc store, immediate offset. */
10820 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10821 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10822 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10823 regcache_raw_read_unsigned (reg_cache, reg_src1,
10824 &u_regval[0]);
10825 if (ARM_PC_REGNUM == reg_src1)
10826 {
10827 /* If R15 was used as Rn, hence current PC+8. */
10828 u_regval[0] = u_regval[0] + 8;
10829 }
10830 offset_8 = (immed_high << 4) | immed_low;
10831 /* Calculate target store address. */
10832 if (14 == arm_insn_r->opcode)
10833 {
10834 tgt_mem_addr = u_regval[0] + offset_8;
10835 }
10836 else
10837 {
10838 tgt_mem_addr = u_regval[0] - offset_8;
10839 }
10840 if (ARM_RECORD_STRH == str_type)
10841 {
10842 record_buf_mem[0] = 2;
10843 record_buf_mem[1] = tgt_mem_addr;
10844 arm_insn_r->mem_rec_count = 1;
10845 }
10846 else if (ARM_RECORD_STRD == str_type)
10847 {
10848 record_buf_mem[0] = 4;
10849 record_buf_mem[1] = tgt_mem_addr;
10850 record_buf_mem[2] = 4;
10851 record_buf_mem[3] = tgt_mem_addr + 4;
10852 arm_insn_r->mem_rec_count = 2;
10853 }
10854 }
10855 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10856 {
10857 /* 2) Store, register offset. */
10858 /* Get Rm. */
10859 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10860 /* Get Rn. */
10861 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10862 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10863 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10864 if (15 == reg_src2)
10865 {
10866 /* If R15 was used as Rn, hence current PC+8. */
10867 u_regval[0] = u_regval[0] + 8;
10868 }
10869 /* Calculate target store address, Rn +/- Rm, register offset. */
10870 if (12 == arm_insn_r->opcode)
10871 {
10872 tgt_mem_addr = u_regval[0] + u_regval[1];
10873 }
10874 else
10875 {
10876 tgt_mem_addr = u_regval[1] - u_regval[0];
10877 }
10878 if (ARM_RECORD_STRH == str_type)
10879 {
10880 record_buf_mem[0] = 2;
10881 record_buf_mem[1] = tgt_mem_addr;
10882 arm_insn_r->mem_rec_count = 1;
10883 }
10884 else if (ARM_RECORD_STRD == str_type)
10885 {
10886 record_buf_mem[0] = 4;
10887 record_buf_mem[1] = tgt_mem_addr;
10888 record_buf_mem[2] = 4;
10889 record_buf_mem[3] = tgt_mem_addr + 4;
10890 arm_insn_r->mem_rec_count = 2;
10891 }
10892 }
10893 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10894 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10895 {
10896 /* 3) Store, immediate pre-indexed. */
10897 /* 5) Store, immediate post-indexed. */
10898 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10899 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10900 offset_8 = (immed_high << 4) | immed_low;
10901 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10902 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10903 /* Calculate target store address, Rn +/- Rm, register offset. */
10904 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10905 {
10906 tgt_mem_addr = u_regval[0] + offset_8;
10907 }
10908 else
10909 {
10910 tgt_mem_addr = u_regval[0] - offset_8;
10911 }
10912 if (ARM_RECORD_STRH == str_type)
10913 {
10914 record_buf_mem[0] = 2;
10915 record_buf_mem[1] = tgt_mem_addr;
10916 arm_insn_r->mem_rec_count = 1;
10917 }
10918 else if (ARM_RECORD_STRD == str_type)
10919 {
10920 record_buf_mem[0] = 4;
10921 record_buf_mem[1] = tgt_mem_addr;
10922 record_buf_mem[2] = 4;
10923 record_buf_mem[3] = tgt_mem_addr + 4;
10924 arm_insn_r->mem_rec_count = 2;
10925 }
10926 /* Record Rn also as it changes. */
10927 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10928 arm_insn_r->reg_rec_count = 1;
10929 }
10930 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10931 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10932 {
10933 /* 4) Store, register pre-indexed. */
10934 /* 6) Store, register post -indexed. */
10935 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10936 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10937 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10938 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10939 /* Calculate target store address, Rn +/- Rm, register offset. */
10940 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10941 {
10942 tgt_mem_addr = u_regval[0] + u_regval[1];
10943 }
10944 else
10945 {
10946 tgt_mem_addr = u_regval[1] - u_regval[0];
10947 }
10948 if (ARM_RECORD_STRH == str_type)
10949 {
10950 record_buf_mem[0] = 2;
10951 record_buf_mem[1] = tgt_mem_addr;
10952 arm_insn_r->mem_rec_count = 1;
10953 }
10954 else if (ARM_RECORD_STRD == str_type)
10955 {
10956 record_buf_mem[0] = 4;
10957 record_buf_mem[1] = tgt_mem_addr;
10958 record_buf_mem[2] = 4;
10959 record_buf_mem[3] = tgt_mem_addr + 4;
10960 arm_insn_r->mem_rec_count = 2;
10961 }
10962 /* Record Rn also as it changes. */
10963 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10964 arm_insn_r->reg_rec_count = 1;
10965 }
10966 return 0;
10967 }
10968
10969 /* Handling ARM extension space insns. */
10970
10971 static int
10972 arm_record_extension_space (insn_decode_record *arm_insn_r)
10973 {
10974 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10975 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10976 uint32_t record_buf[8], record_buf_mem[8];
10977 uint32_t reg_src1 = 0;
10978 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10979 struct regcache *reg_cache = arm_insn_r->regcache;
10980 ULONGEST u_regval = 0;
10981
10982 gdb_assert (!INSN_RECORDED(arm_insn_r));
10983 /* Handle unconditional insn extension space. */
10984
10985 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10986 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10987 if (arm_insn_r->cond)
10988 {
10989 /* PLD has no affect on architectural state, it just affects
10990 the caches. */
10991 if (5 == ((opcode1 & 0xE0) >> 5))
10992 {
10993 /* BLX(1) */
10994 record_buf[0] = ARM_PS_REGNUM;
10995 record_buf[1] = ARM_LR_REGNUM;
10996 arm_insn_r->reg_rec_count = 2;
10997 }
10998 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10999 }
11000
11001
11002 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11003 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
11004 {
11005 ret = -1;
11006 /* Undefined instruction on ARM V5; need to handle if later
11007 versions define it. */
11008 }
11009
11010 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
11011 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11012 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
11013
11014 /* Handle arithmetic insn extension space. */
11015 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
11016 && !INSN_RECORDED(arm_insn_r))
11017 {
11018 /* Handle MLA(S) and MUL(S). */
11019 if (0 <= insn_op1 && 3 >= insn_op1)
11020 {
11021 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11022 record_buf[1] = ARM_PS_REGNUM;
11023 arm_insn_r->reg_rec_count = 2;
11024 }
11025 else if (4 <= insn_op1 && 15 >= insn_op1)
11026 {
11027 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11028 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11029 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11030 record_buf[2] = ARM_PS_REGNUM;
11031 arm_insn_r->reg_rec_count = 3;
11032 }
11033 }
11034
11035 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11036 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11037 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11038
11039 /* Handle control insn extension space. */
11040
11041 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11042 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11043 {
11044 if (!bit (arm_insn_r->arm_insn,25))
11045 {
11046 if (!bits (arm_insn_r->arm_insn, 4, 7))
11047 {
11048 if ((0 == insn_op1) || (2 == insn_op1))
11049 {
11050 /* MRS. */
11051 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11052 arm_insn_r->reg_rec_count = 1;
11053 }
11054 else if (1 == insn_op1)
11055 {
11056 /* CSPR is going to be changed. */
11057 record_buf[0] = ARM_PS_REGNUM;
11058 arm_insn_r->reg_rec_count = 1;
11059 }
11060 else if (3 == insn_op1)
11061 {
11062 /* SPSR is going to be changed. */
11063 /* We need to get SPSR value, which is yet to be done. */
11064 printf_unfiltered (_("Process record does not support "
11065 "instruction 0x%0x at address %s.\n"),
11066 arm_insn_r->arm_insn,
11067 paddress (arm_insn_r->gdbarch,
11068 arm_insn_r->this_addr));
11069 return -1;
11070 }
11071 }
11072 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11073 {
11074 if (1 == insn_op1)
11075 {
11076 /* BX. */
11077 record_buf[0] = ARM_PS_REGNUM;
11078 arm_insn_r->reg_rec_count = 1;
11079 }
11080 else if (3 == insn_op1)
11081 {
11082 /* CLZ. */
11083 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11084 arm_insn_r->reg_rec_count = 1;
11085 }
11086 }
11087 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11088 {
11089 /* BLX. */
11090 record_buf[0] = ARM_PS_REGNUM;
11091 record_buf[1] = ARM_LR_REGNUM;
11092 arm_insn_r->reg_rec_count = 2;
11093 }
11094 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11095 {
11096 /* QADD, QSUB, QDADD, QDSUB */
11097 record_buf[0] = ARM_PS_REGNUM;
11098 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11099 arm_insn_r->reg_rec_count = 2;
11100 }
11101 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11102 {
11103 /* BKPT. */
11104 record_buf[0] = ARM_PS_REGNUM;
11105 record_buf[1] = ARM_LR_REGNUM;
11106 arm_insn_r->reg_rec_count = 2;
11107
11108 /* Save SPSR also;how? */
11109 printf_unfiltered (_("Process record does not support "
11110 "instruction 0x%0x at address %s.\n"),
11111 arm_insn_r->arm_insn,
11112 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11113 return -1;
11114 }
11115 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11116 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11117 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11118 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11119 )
11120 {
11121 if (0 == insn_op1 || 1 == insn_op1)
11122 {
11123 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11124 /* We dont do optimization for SMULW<y> where we
11125 need only Rd. */
11126 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11127 record_buf[1] = ARM_PS_REGNUM;
11128 arm_insn_r->reg_rec_count = 2;
11129 }
11130 else if (2 == insn_op1)
11131 {
11132 /* SMLAL<x><y>. */
11133 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11134 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11135 arm_insn_r->reg_rec_count = 2;
11136 }
11137 else if (3 == insn_op1)
11138 {
11139 /* SMUL<x><y>. */
11140 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11141 arm_insn_r->reg_rec_count = 1;
11142 }
11143 }
11144 }
11145 else
11146 {
11147 /* MSR : immediate form. */
11148 if (1 == insn_op1)
11149 {
11150 /* CSPR is going to be changed. */
11151 record_buf[0] = ARM_PS_REGNUM;
11152 arm_insn_r->reg_rec_count = 1;
11153 }
11154 else if (3 == insn_op1)
11155 {
11156 /* SPSR is going to be changed. */
11157 /* we need to get SPSR value, which is yet to be done */
11158 printf_unfiltered (_("Process record does not support "
11159 "instruction 0x%0x at address %s.\n"),
11160 arm_insn_r->arm_insn,
11161 paddress (arm_insn_r->gdbarch,
11162 arm_insn_r->this_addr));
11163 return -1;
11164 }
11165 }
11166 }
11167
11168 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11169 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11170 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11171
11172 /* Handle load/store insn extension space. */
11173
11174 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11175 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11176 && !INSN_RECORDED(arm_insn_r))
11177 {
11178 /* SWP/SWPB. */
11179 if (0 == insn_op1)
11180 {
11181 /* These insn, changes register and memory as well. */
11182 /* SWP or SWPB insn. */
11183 /* Get memory address given by Rn. */
11184 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11185 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11186 /* SWP insn ?, swaps word. */
11187 if (8 == arm_insn_r->opcode)
11188 {
11189 record_buf_mem[0] = 4;
11190 }
11191 else
11192 {
11193 /* SWPB insn, swaps only byte. */
11194 record_buf_mem[0] = 1;
11195 }
11196 record_buf_mem[1] = u_regval;
11197 arm_insn_r->mem_rec_count = 1;
11198 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11199 arm_insn_r->reg_rec_count = 1;
11200 }
11201 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11202 {
11203 /* STRH. */
11204 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11205 ARM_RECORD_STRH);
11206 }
11207 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11208 {
11209 /* LDRD. */
11210 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11211 record_buf[1] = record_buf[0] + 1;
11212 arm_insn_r->reg_rec_count = 2;
11213 }
11214 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11215 {
11216 /* STRD. */
11217 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11218 ARM_RECORD_STRD);
11219 }
11220 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11221 {
11222 /* LDRH, LDRSB, LDRSH. */
11223 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11224 arm_insn_r->reg_rec_count = 1;
11225 }
11226
11227 }
11228
11229 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11230 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11231 && !INSN_RECORDED(arm_insn_r))
11232 {
11233 ret = -1;
11234 /* Handle coprocessor insn extension space. */
11235 }
11236
11237 /* To be done for ARMv5 and later; as of now we return -1. */
11238 if (-1 == ret)
11239 printf_unfiltered (_("Process record does not support instruction x%0x "
11240 "at address %s.\n"),arm_insn_r->arm_insn,
11241 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11242
11243
11244 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11245 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11246
11247 return ret;
11248 }
11249
11250 /* Handling opcode 000 insns. */
11251
11252 static int
11253 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11254 {
11255 struct regcache *reg_cache = arm_insn_r->regcache;
11256 uint32_t record_buf[8], record_buf_mem[8];
11257 ULONGEST u_regval[2] = {0};
11258
11259 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11260 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11261 uint32_t opcode1 = 0;
11262
11263 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11264 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11265 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11266
11267 /* Data processing insn /multiply insn. */
11268 if (9 == arm_insn_r->decode
11269 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11270 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11271 {
11272 /* Handle multiply instructions. */
11273 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11274 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11275 {
11276 /* Handle MLA and MUL. */
11277 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11278 record_buf[1] = ARM_PS_REGNUM;
11279 arm_insn_r->reg_rec_count = 2;
11280 }
11281 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11282 {
11283 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11284 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11285 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11286 record_buf[2] = ARM_PS_REGNUM;
11287 arm_insn_r->reg_rec_count = 3;
11288 }
11289 }
11290 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11291 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11292 {
11293 /* Handle misc load insns, as 20th bit (L = 1). */
11294 /* LDR insn has a capability to do branching, if
11295 MOV LR, PC is precceded by LDR insn having Rn as R15
11296 in that case, it emulates branch and link insn, and hence we
11297 need to save CSPR and PC as well. I am not sure this is right
11298 place; as opcode = 010 LDR insn make this happen, if R15 was
11299 used. */
11300 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11301 if (15 != reg_dest)
11302 {
11303 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11304 arm_insn_r->reg_rec_count = 1;
11305 }
11306 else
11307 {
11308 record_buf[0] = reg_dest;
11309 record_buf[1] = ARM_PS_REGNUM;
11310 arm_insn_r->reg_rec_count = 2;
11311 }
11312 }
11313 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11314 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11315 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11316 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11317 {
11318 /* Handle MSR insn. */
11319 if (9 == arm_insn_r->opcode)
11320 {
11321 /* CSPR is going to be changed. */
11322 record_buf[0] = ARM_PS_REGNUM;
11323 arm_insn_r->reg_rec_count = 1;
11324 }
11325 else
11326 {
11327 /* SPSR is going to be changed. */
11328 /* How to read SPSR value? */
11329 printf_unfiltered (_("Process record does not support instruction "
11330 "0x%0x at address %s.\n"),
11331 arm_insn_r->arm_insn,
11332 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11333 return -1;
11334 }
11335 }
11336 else if (9 == arm_insn_r->decode
11337 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11338 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11339 {
11340 /* Handling SWP, SWPB. */
11341 /* These insn, changes register and memory as well. */
11342 /* SWP or SWPB insn. */
11343
11344 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11345 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11346 /* SWP insn ?, swaps word. */
11347 if (8 == arm_insn_r->opcode)
11348 {
11349 record_buf_mem[0] = 4;
11350 }
11351 else
11352 {
11353 /* SWPB insn, swaps only byte. */
11354 record_buf_mem[0] = 1;
11355 }
11356 record_buf_mem[1] = u_regval[0];
11357 arm_insn_r->mem_rec_count = 1;
11358 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11359 arm_insn_r->reg_rec_count = 1;
11360 }
11361 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11362 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11363 {
11364 /* Handle BLX, branch and link/exchange. */
11365 if (9 == arm_insn_r->opcode)
11366 {
11367 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11368 and R14 stores the return address. */
11369 record_buf[0] = ARM_PS_REGNUM;
11370 record_buf[1] = ARM_LR_REGNUM;
11371 arm_insn_r->reg_rec_count = 2;
11372 }
11373 }
11374 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11375 {
11376 /* Handle enhanced software breakpoint insn, BKPT. */
11377 /* CPSR is changed to be executed in ARM state, disabling normal
11378 interrupts, entering abort mode. */
11379 /* According to high vector configuration PC is set. */
11380 /* user hit breakpoint and type reverse, in
11381 that case, we need to go back with previous CPSR and
11382 Program Counter. */
11383 record_buf[0] = ARM_PS_REGNUM;
11384 record_buf[1] = ARM_LR_REGNUM;
11385 arm_insn_r->reg_rec_count = 2;
11386
11387 /* Save SPSR also; how? */
11388 printf_unfiltered (_("Process record does not support instruction "
11389 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11390 paddress (arm_insn_r->gdbarch,
11391 arm_insn_r->this_addr));
11392 return -1;
11393 }
11394 else if (11 == arm_insn_r->decode
11395 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11396 {
11397 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11398
11399 /* Handle str(x) insn */
11400 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11401 ARM_RECORD_STRH);
11402 }
11403 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11404 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11405 {
11406 /* Handle BX, branch and link/exchange. */
11407 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11408 record_buf[0] = ARM_PS_REGNUM;
11409 arm_insn_r->reg_rec_count = 1;
11410 }
11411 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11412 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11413 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11414 {
11415 /* Count leading zeros: CLZ. */
11416 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11417 arm_insn_r->reg_rec_count = 1;
11418 }
11419 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11420 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11421 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11422 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11423 )
11424 {
11425 /* Handle MRS insn. */
11426 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11427 arm_insn_r->reg_rec_count = 1;
11428 }
11429 else if (arm_insn_r->opcode <= 15)
11430 {
11431 /* Normal data processing insns. */
11432 /* Out of 11 shifter operands mode, all the insn modifies destination
11433 register, which is specified by 13-16 decode. */
11434 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11435 record_buf[1] = ARM_PS_REGNUM;
11436 arm_insn_r->reg_rec_count = 2;
11437 }
11438 else
11439 {
11440 return -1;
11441 }
11442
11443 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11444 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11445 return 0;
11446 }
11447
11448 /* Handling opcode 001 insns. */
11449
11450 static int
11451 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11452 {
11453 uint32_t record_buf[8], record_buf_mem[8];
11454
11455 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11456 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11457
11458 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11459 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11460 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11461 )
11462 {
11463 /* Handle MSR insn. */
11464 if (9 == arm_insn_r->opcode)
11465 {
11466 /* CSPR is going to be changed. */
11467 record_buf[0] = ARM_PS_REGNUM;
11468 arm_insn_r->reg_rec_count = 1;
11469 }
11470 else
11471 {
11472 /* SPSR is going to be changed. */
11473 }
11474 }
11475 else if (arm_insn_r->opcode <= 15)
11476 {
11477 /* Normal data processing insns. */
11478 /* Out of 11 shifter operands mode, all the insn modifies destination
11479 register, which is specified by 13-16 decode. */
11480 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11481 record_buf[1] = ARM_PS_REGNUM;
11482 arm_insn_r->reg_rec_count = 2;
11483 }
11484 else
11485 {
11486 return -1;
11487 }
11488
11489 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11490 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11491 return 0;
11492 }
11493
11494 /* Handle ARM mode instructions with opcode 010. */
11495
11496 static int
11497 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11498 {
11499 struct regcache *reg_cache = arm_insn_r->regcache;
11500
11501 uint32_t reg_base , reg_dest;
11502 uint32_t offset_12, tgt_mem_addr;
11503 uint32_t record_buf[8], record_buf_mem[8];
11504 unsigned char wback;
11505 ULONGEST u_regval;
11506
11507 /* Calculate wback. */
11508 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
11509 || (bit (arm_insn_r->arm_insn, 21) == 1);
11510
11511 arm_insn_r->reg_rec_count = 0;
11512 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11513
11514 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11515 {
11516 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11517 and LDRT. */
11518
11519 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11520 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
11521
11522 /* The LDR instruction is capable of doing branching. If MOV LR, PC
11523 preceeds a LDR instruction having R15 as reg_base, it
11524 emulates a branch and link instruction, and hence we need to save
11525 CPSR and PC as well. */
11526 if (ARM_PC_REGNUM == reg_dest)
11527 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11528
11529 /* If wback is true, also save the base register, which is going to be
11530 written to. */
11531 if (wback)
11532 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11533 }
11534 else
11535 {
11536 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11537
11538 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11539 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11540
11541 /* Handle bit U. */
11542 if (bit (arm_insn_r->arm_insn, 23))
11543 {
11544 /* U == 1: Add the offset. */
11545 tgt_mem_addr = (uint32_t) u_regval + offset_12;
11546 }
11547 else
11548 {
11549 /* U == 0: subtract the offset. */
11550 tgt_mem_addr = (uint32_t) u_regval - offset_12;
11551 }
11552
11553 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11554 bytes. */
11555 if (bit (arm_insn_r->arm_insn, 22))
11556 {
11557 /* STRB and STRBT: 1 byte. */
11558 record_buf_mem[0] = 1;
11559 }
11560 else
11561 {
11562 /* STR and STRT: 4 bytes. */
11563 record_buf_mem[0] = 4;
11564 }
11565
11566 /* Handle bit P. */
11567 if (bit (arm_insn_r->arm_insn, 24))
11568 record_buf_mem[1] = tgt_mem_addr;
11569 else
11570 record_buf_mem[1] = (uint32_t) u_regval;
11571
11572 arm_insn_r->mem_rec_count = 1;
11573
11574 /* If wback is true, also save the base register, which is going to be
11575 written to. */
11576 if (wback)
11577 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11578 }
11579
11580 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11581 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11582 return 0;
11583 }
11584
11585 /* Handling opcode 011 insns. */
11586
11587 static int
11588 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11589 {
11590 struct regcache *reg_cache = arm_insn_r->regcache;
11591
11592 uint32_t shift_imm = 0;
11593 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11594 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11595 uint32_t record_buf[8], record_buf_mem[8];
11596
11597 LONGEST s_word;
11598 ULONGEST u_regval[2];
11599
11600 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11601 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11602
11603 /* Handle enhanced store insns and LDRD DSP insn,
11604 order begins according to addressing modes for store insns
11605 STRH insn. */
11606
11607 /* LDR or STR? */
11608 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11609 {
11610 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11611 /* LDR insn has a capability to do branching, if
11612 MOV LR, PC is precedded by LDR insn having Rn as R15
11613 in that case, it emulates branch and link insn, and hence we
11614 need to save CSPR and PC as well. */
11615 if (15 != reg_dest)
11616 {
11617 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11618 arm_insn_r->reg_rec_count = 1;
11619 }
11620 else
11621 {
11622 record_buf[0] = reg_dest;
11623 record_buf[1] = ARM_PS_REGNUM;
11624 arm_insn_r->reg_rec_count = 2;
11625 }
11626 }
11627 else
11628 {
11629 if (! bits (arm_insn_r->arm_insn, 4, 11))
11630 {
11631 /* Store insn, register offset and register pre-indexed,
11632 register post-indexed. */
11633 /* Get Rm. */
11634 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11635 /* Get Rn. */
11636 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11637 regcache_raw_read_unsigned (reg_cache, reg_src1
11638 , &u_regval[0]);
11639 regcache_raw_read_unsigned (reg_cache, reg_src2
11640 , &u_regval[1]);
11641 if (15 == reg_src2)
11642 {
11643 /* If R15 was used as Rn, hence current PC+8. */
11644 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11645 u_regval[0] = u_regval[0] + 8;
11646 }
11647 /* Calculate target store address, Rn +/- Rm, register offset. */
11648 /* U == 1. */
11649 if (bit (arm_insn_r->arm_insn, 23))
11650 {
11651 tgt_mem_addr = u_regval[0] + u_regval[1];
11652 }
11653 else
11654 {
11655 tgt_mem_addr = u_regval[1] - u_regval[0];
11656 }
11657
11658 switch (arm_insn_r->opcode)
11659 {
11660 /* STR. */
11661 case 8:
11662 case 12:
11663 /* STR. */
11664 case 9:
11665 case 13:
11666 /* STRT. */
11667 case 1:
11668 case 5:
11669 /* STR. */
11670 case 0:
11671 case 4:
11672 record_buf_mem[0] = 4;
11673 break;
11674
11675 /* STRB. */
11676 case 10:
11677 case 14:
11678 /* STRB. */
11679 case 11:
11680 case 15:
11681 /* STRBT. */
11682 case 3:
11683 case 7:
11684 /* STRB. */
11685 case 2:
11686 case 6:
11687 record_buf_mem[0] = 1;
11688 break;
11689
11690 default:
11691 gdb_assert_not_reached ("no decoding pattern found");
11692 break;
11693 }
11694 record_buf_mem[1] = tgt_mem_addr;
11695 arm_insn_r->mem_rec_count = 1;
11696
11697 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11698 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11699 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11700 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11701 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11702 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11703 )
11704 {
11705 /* Rn is going to be changed in pre-indexed mode and
11706 post-indexed mode as well. */
11707 record_buf[0] = reg_src2;
11708 arm_insn_r->reg_rec_count = 1;
11709 }
11710 }
11711 else
11712 {
11713 /* Store insn, scaled register offset; scaled pre-indexed. */
11714 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11715 /* Get Rm. */
11716 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11717 /* Get Rn. */
11718 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11719 /* Get shift_imm. */
11720 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11721 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11722 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11723 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11724 /* Offset_12 used as shift. */
11725 switch (offset_12)
11726 {
11727 case 0:
11728 /* Offset_12 used as index. */
11729 offset_12 = u_regval[0] << shift_imm;
11730 break;
11731
11732 case 1:
11733 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
11734 break;
11735
11736 case 2:
11737 if (!shift_imm)
11738 {
11739 if (bit (u_regval[0], 31))
11740 {
11741 offset_12 = 0xFFFFFFFF;
11742 }
11743 else
11744 {
11745 offset_12 = 0;
11746 }
11747 }
11748 else
11749 {
11750 /* This is arithmetic shift. */
11751 offset_12 = s_word >> shift_imm;
11752 }
11753 break;
11754
11755 case 3:
11756 if (!shift_imm)
11757 {
11758 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11759 &u_regval[1]);
11760 /* Get C flag value and shift it by 31. */
11761 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11762 | (u_regval[0]) >> 1);
11763 }
11764 else
11765 {
11766 offset_12 = (u_regval[0] >> shift_imm) \
11767 | (u_regval[0] <<
11768 (sizeof(uint32_t) - shift_imm));
11769 }
11770 break;
11771
11772 default:
11773 gdb_assert_not_reached ("no decoding pattern found");
11774 break;
11775 }
11776
11777 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11778 /* bit U set. */
11779 if (bit (arm_insn_r->arm_insn, 23))
11780 {
11781 tgt_mem_addr = u_regval[1] + offset_12;
11782 }
11783 else
11784 {
11785 tgt_mem_addr = u_regval[1] - offset_12;
11786 }
11787
11788 switch (arm_insn_r->opcode)
11789 {
11790 /* STR. */
11791 case 8:
11792 case 12:
11793 /* STR. */
11794 case 9:
11795 case 13:
11796 /* STRT. */
11797 case 1:
11798 case 5:
11799 /* STR. */
11800 case 0:
11801 case 4:
11802 record_buf_mem[0] = 4;
11803 break;
11804
11805 /* STRB. */
11806 case 10:
11807 case 14:
11808 /* STRB. */
11809 case 11:
11810 case 15:
11811 /* STRBT. */
11812 case 3:
11813 case 7:
11814 /* STRB. */
11815 case 2:
11816 case 6:
11817 record_buf_mem[0] = 1;
11818 break;
11819
11820 default:
11821 gdb_assert_not_reached ("no decoding pattern found");
11822 break;
11823 }
11824 record_buf_mem[1] = tgt_mem_addr;
11825 arm_insn_r->mem_rec_count = 1;
11826
11827 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11828 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11829 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11830 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11831 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11832 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11833 )
11834 {
11835 /* Rn is going to be changed in register scaled pre-indexed
11836 mode,and scaled post indexed mode. */
11837 record_buf[0] = reg_src2;
11838 arm_insn_r->reg_rec_count = 1;
11839 }
11840 }
11841 }
11842
11843 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11844 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11845 return 0;
11846 }
11847
11848 /* Handle ARM mode instructions with opcode 100. */
11849
11850 static int
11851 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11852 {
11853 struct regcache *reg_cache = arm_insn_r->regcache;
11854 uint32_t register_count = 0, register_bits;
11855 uint32_t reg_base, addr_mode;
11856 uint32_t record_buf[24], record_buf_mem[48];
11857 uint32_t wback;
11858 ULONGEST u_regval;
11859
11860 /* Fetch the list of registers. */
11861 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11862 arm_insn_r->reg_rec_count = 0;
11863
11864 /* Fetch the base register that contains the address we are loading data
11865 to. */
11866 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11867
11868 /* Calculate wback. */
11869 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11870
11871 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11872 {
11873 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11874
11875 /* Find out which registers are going to be loaded from memory. */
11876 while (register_bits)
11877 {
11878 if (register_bits & 0x00000001)
11879 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11880 register_bits = register_bits >> 1;
11881 register_count++;
11882 }
11883
11884
11885 /* If wback is true, also save the base register, which is going to be
11886 written to. */
11887 if (wback)
11888 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11889
11890 /* Save the CPSR register. */
11891 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11892 }
11893 else
11894 {
11895 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11896
11897 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11898
11899 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11900
11901 /* Find out how many registers are going to be stored to memory. */
11902 while (register_bits)
11903 {
11904 if (register_bits & 0x00000001)
11905 register_count++;
11906 register_bits = register_bits >> 1;
11907 }
11908
11909 switch (addr_mode)
11910 {
11911 /* STMDA (STMED): Decrement after. */
11912 case 0:
11913 record_buf_mem[1] = (uint32_t) u_regval
11914 - register_count * INT_REGISTER_SIZE + 4;
11915 break;
11916 /* STM (STMIA, STMEA): Increment after. */
11917 case 1:
11918 record_buf_mem[1] = (uint32_t) u_regval;
11919 break;
11920 /* STMDB (STMFD): Decrement before. */
11921 case 2:
11922 record_buf_mem[1] = (uint32_t) u_regval
11923 - register_count * INT_REGISTER_SIZE;
11924 break;
11925 /* STMIB (STMFA): Increment before. */
11926 case 3:
11927 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11928 break;
11929 default:
11930 gdb_assert_not_reached ("no decoding pattern found");
11931 break;
11932 }
11933
11934 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11935 arm_insn_r->mem_rec_count = 1;
11936
11937 /* If wback is true, also save the base register, which is going to be
11938 written to. */
11939 if (wback)
11940 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11941 }
11942
11943 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11944 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11945 return 0;
11946 }
11947
11948 /* Handling opcode 101 insns. */
11949
11950 static int
11951 arm_record_b_bl (insn_decode_record *arm_insn_r)
11952 {
11953 uint32_t record_buf[8];
11954
11955 /* Handle B, BL, BLX(1) insns. */
11956 /* B simply branches so we do nothing here. */
11957 /* Note: BLX(1) doesnt fall here but instead it falls into
11958 extension space. */
11959 if (bit (arm_insn_r->arm_insn, 24))
11960 {
11961 record_buf[0] = ARM_LR_REGNUM;
11962 arm_insn_r->reg_rec_count = 1;
11963 }
11964
11965 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11966
11967 return 0;
11968 }
11969
11970 /* Handling opcode 110 insns. */
11971
11972 static int
11973 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11974 {
11975 printf_unfiltered (_("Process record does not support instruction "
11976 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11977 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11978
11979 return -1;
11980 }
11981
/* Record handler for vector data transfer instructions
   (VMOV/VMRS/VMSR/VDUP between ARM core registers and VFP/NEON
   registers).  Dispatches on the L bit (20) and C bit (8).  Returns 0
   always; only registers are recorded, no memory.  */

static int
arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
{
  uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
  uint32_t record_buf[4];

  /* num_regs is used as the base index of the VFP pseudo registers.  */
  const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
  /* Core register Rt.  */
  reg_t = bits (arm_insn_r->arm_insn, 12, 15);
  /* NOTE(review): reg_v and bits_a both read bits 21-23, so they are
     always equal; presumably one of them was meant to read a different
     field (e.g. Vn in bits 16-19) — verify against the VFP/NEON
     encoding tables.  */
  reg_v = bits (arm_insn_r->arm_insn, 21, 23);
  bits_a = bits (arm_insn_r->arm_insn, 21, 23);
  bit_l = bit (arm_insn_r->arm_insn, 20);
  bit_c = bit (arm_insn_r->arm_insn, 8);

  /* Handle VMOV instruction.  */
  if (bit_l && bit_c)
    {
      /* Transfer to the core register: only Rt changes.  */
      record_buf[0] = reg_t;
      arm_insn_r->reg_rec_count = 1;
    }
  else if (bit_l && !bit_c)
    {
      /* Handle VMOV instruction.  */
      if (bits_a == 0x00)
        {
          /* NOTE(review): bit 20 is bit_l, which is 1 on this path, so
             this condition is always true here — confirm intent.  */
          if (bit (arm_insn_r->arm_insn, 20))
            record_buf[0] = reg_t;
          else
            record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
                            (reg_v << 1));

          arm_insn_r->reg_rec_count = 1;
        }
      /* Handle VMRS instruction.  */
      else if (bits_a == 0x07)
        {
          /* VMRS with Rt == 15 transfers the flags into the CPSR.  */
          if (reg_t == 15)
            reg_t = ARM_PS_REGNUM;

          record_buf[0] = reg_t;
          arm_insn_r->reg_rec_count = 1;
        }
    }
  else if (!bit_l && !bit_c)
    {
      /* Handle VMOV instruction.  */
      if (bits_a == 0x00)
        {
          /* NOTE(review): bit 20 is bit_l, which is 0 on this path, so
             only the else branch can be taken here — confirm intent.  */
          if (bit (arm_insn_r->arm_insn, 20))
            record_buf[0] = reg_t;
          else
            record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
                            (reg_v << 1));

          arm_insn_r->reg_rec_count = 1;
        }
      /* Handle VMSR instruction.  */
      else if (bits_a == 0x07)
        {
          /* VMSR writes the FPSCR.  */
          record_buf[0] = ARM_FPSCR_REGNUM;
          arm_insn_r->reg_rec_count = 1;
        }
    }
  else if (!bit_l && bit_c)
    {
      /* Handle VMOV instruction.  */
      if (!(bits_a & 0x04))
        {
          /* Scalar transfer into a D register; bit 7 extends the
             register number.  */
          record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
                          + ARM_D0_REGNUM;
          arm_insn_r->reg_rec_count = 1;
        }
      /* Handle VDUP instruction.  */
      else
        {
          /* Bit 21 (Q) selects a quad-register destination, which
             occupies two consecutive D registers.  */
          if (bit (arm_insn_r->arm_insn, 21))
            {
              reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
              record_buf[0] = reg_v + ARM_D0_REGNUM;
              record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
              arm_insn_r->reg_rec_count = 2;
            }
          else
            {
              reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
              record_buf[0] = reg_v + ARM_D0_REGNUM;
              arm_insn_r->reg_rec_count = 1;
            }
        }
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  return 0;
}
12077
12078 /* Record handler for extension register load/store instructions. */
12079
12080 static int
12081 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
12082 {
12083 uint32_t opcode, single_reg;
12084 uint8_t op_vldm_vstm;
12085 uint32_t record_buf[8], record_buf_mem[128];
12086 ULONGEST u_regval = 0;
12087
12088 struct regcache *reg_cache = arm_insn_r->regcache;
12089 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
12090
12091 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12092 single_reg = bit (arm_insn_r->arm_insn, 8);
12093 op_vldm_vstm = opcode & 0x1b;
12094
12095 /* Handle VMOV instructions. */
12096 if ((opcode & 0x1e) == 0x04)
12097 {
12098 if (bit (arm_insn_r->arm_insn, 4))
12099 {
12100 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12101 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12102 arm_insn_r->reg_rec_count = 2;
12103 }
12104 else
12105 {
12106 uint8_t reg_m = (bits (arm_insn_r->arm_insn, 0, 3) << 1)
12107 | bit (arm_insn_r->arm_insn, 5);
12108
12109 if (!single_reg)
12110 {
12111 record_buf[0] = num_regs + reg_m;
12112 record_buf[1] = num_regs + reg_m + 1;
12113 arm_insn_r->reg_rec_count = 2;
12114 }
12115 else
12116 {
12117 record_buf[0] = reg_m + ARM_D0_REGNUM;
12118 arm_insn_r->reg_rec_count = 1;
12119 }
12120 }
12121 }
12122 /* Handle VSTM and VPUSH instructions. */
12123 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12124 || op_vldm_vstm == 0x12)
12125 {
12126 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12127 uint32_t memory_index = 0;
12128
12129 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12130 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12131 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12132 imm_off32 = imm_off8 << 24;
12133 memory_count = imm_off8;
12134
12135 if (bit (arm_insn_r->arm_insn, 23))
12136 start_address = u_regval;
12137 else
12138 start_address = u_regval - imm_off32;
12139
12140 if (bit (arm_insn_r->arm_insn, 21))
12141 {
12142 record_buf[0] = reg_rn;
12143 arm_insn_r->reg_rec_count = 1;
12144 }
12145
12146 while (memory_count > 0)
12147 {
12148 if (!single_reg)
12149 {
12150 record_buf_mem[memory_index] = start_address;
12151 record_buf_mem[memory_index + 1] = 4;
12152 start_address = start_address + 4;
12153 memory_index = memory_index + 2;
12154 }
12155 else
12156 {
12157 record_buf_mem[memory_index] = start_address;
12158 record_buf_mem[memory_index + 1] = 4;
12159 record_buf_mem[memory_index + 2] = start_address + 4;
12160 record_buf_mem[memory_index + 3] = 4;
12161 start_address = start_address + 8;
12162 memory_index = memory_index + 4;
12163 }
12164 memory_count--;
12165 }
12166 arm_insn_r->mem_rec_count = (memory_index >> 1);
12167 }
12168 /* Handle VLDM instructions. */
12169 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12170 || op_vldm_vstm == 0x13)
12171 {
12172 uint32_t reg_count, reg_vd;
12173 uint32_t reg_index = 0;
12174
12175 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12176 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12177
12178 if (single_reg)
12179 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12180 else
12181 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12182
12183 if (bit (arm_insn_r->arm_insn, 21))
12184 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12185
12186 while (reg_count > 0)
12187 {
12188 if (single_reg)
12189 record_buf[reg_index++] = num_regs + reg_vd + reg_count - 1;
12190 else
12191 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12192
12193 reg_count--;
12194 }
12195 arm_insn_r->reg_rec_count = reg_index;
12196 }
12197 /* VSTR Vector store register. */
12198 else if ((opcode & 0x13) == 0x10)
12199 {
12200 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12201 uint32_t memory_index = 0;
12202
12203 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12204 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12205 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12206 imm_off32 = imm_off8 << 24;
12207 memory_count = imm_off8;
12208
12209 if (bit (arm_insn_r->arm_insn, 23))
12210 start_address = u_regval + imm_off32;
12211 else
12212 start_address = u_regval - imm_off32;
12213
12214 if (single_reg)
12215 {
12216 record_buf_mem[memory_index] = start_address;
12217 record_buf_mem[memory_index + 1] = 4;
12218 arm_insn_r->mem_rec_count = 1;
12219 }
12220 else
12221 {
12222 record_buf_mem[memory_index] = start_address;
12223 record_buf_mem[memory_index + 1] = 4;
12224 record_buf_mem[memory_index + 2] = start_address + 4;
12225 record_buf_mem[memory_index + 3] = 4;
12226 arm_insn_r->mem_rec_count = 2;
12227 }
12228 }
12229 /* VLDR Vector load register. */
12230 else if ((opcode & 0x13) == 0x11)
12231 {
12232 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12233
12234 if (!single_reg)
12235 {
12236 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12237 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12238 }
12239 else
12240 {
12241 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12242 record_buf[0] = num_regs + reg_vd;
12243 }
12244 arm_insn_r->reg_rec_count = 1;
12245 }
12246
12247 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12248 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12249 return 0;
12250 }
12251
12252 /* Record handler for arm/thumb mode VFP data processing instructions. */
12253
12254 static int
12255 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
12256 {
12257 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12258 uint32_t record_buf[4];
12259 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12260 enum insn_types curr_insn_type = INSN_INV;
12261
12262 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12263 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12264 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12265 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12266 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12267 bit_d = bit (arm_insn_r->arm_insn, 22);
12268 opc1 = opc1 & 0x04;
12269
12270 /* Handle VMLA, VMLS. */
12271 if (opc1 == 0x00)
12272 {
12273 if (bit (arm_insn_r->arm_insn, 10))
12274 {
12275 if (bit (arm_insn_r->arm_insn, 6))
12276 curr_insn_type = INSN_T0;
12277 else
12278 curr_insn_type = INSN_T1;
12279 }
12280 else
12281 {
12282 if (dp_op_sz)
12283 curr_insn_type = INSN_T1;
12284 else
12285 curr_insn_type = INSN_T2;
12286 }
12287 }
12288 /* Handle VNMLA, VNMLS, VNMUL. */
12289 else if (opc1 == 0x01)
12290 {
12291 if (dp_op_sz)
12292 curr_insn_type = INSN_T1;
12293 else
12294 curr_insn_type = INSN_T2;
12295 }
12296 /* Handle VMUL. */
12297 else if (opc1 == 0x02 && !(opc3 & 0x01))
12298 {
12299 if (bit (arm_insn_r->arm_insn, 10))
12300 {
12301 if (bit (arm_insn_r->arm_insn, 6))
12302 curr_insn_type = INSN_T0;
12303 else
12304 curr_insn_type = INSN_T1;
12305 }
12306 else
12307 {
12308 if (dp_op_sz)
12309 curr_insn_type = INSN_T1;
12310 else
12311 curr_insn_type = INSN_T2;
12312 }
12313 }
12314 /* Handle VADD, VSUB. */
12315 else if (opc1 == 0x03)
12316 {
12317 if (!bit (arm_insn_r->arm_insn, 9))
12318 {
12319 if (bit (arm_insn_r->arm_insn, 6))
12320 curr_insn_type = INSN_T0;
12321 else
12322 curr_insn_type = INSN_T1;
12323 }
12324 else
12325 {
12326 if (dp_op_sz)
12327 curr_insn_type = INSN_T1;
12328 else
12329 curr_insn_type = INSN_T2;
12330 }
12331 }
12332 /* Handle VDIV. */
12333 else if (opc1 == 0x0b)
12334 {
12335 if (dp_op_sz)
12336 curr_insn_type = INSN_T1;
12337 else
12338 curr_insn_type = INSN_T2;
12339 }
12340 /* Handle all other vfp data processing instructions. */
12341 else if (opc1 == 0x0b)
12342 {
12343 /* Handle VMOV. */
12344 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12345 {
12346 if (bit (arm_insn_r->arm_insn, 4))
12347 {
12348 if (bit (arm_insn_r->arm_insn, 6))
12349 curr_insn_type = INSN_T0;
12350 else
12351 curr_insn_type = INSN_T1;
12352 }
12353 else
12354 {
12355 if (dp_op_sz)
12356 curr_insn_type = INSN_T1;
12357 else
12358 curr_insn_type = INSN_T2;
12359 }
12360 }
12361 /* Handle VNEG and VABS. */
12362 else if ((opc2 == 0x01 && opc3 == 0x01)
12363 || (opc2 == 0x00 && opc3 == 0x03))
12364 {
12365 if (!bit (arm_insn_r->arm_insn, 11))
12366 {
12367 if (bit (arm_insn_r->arm_insn, 6))
12368 curr_insn_type = INSN_T0;
12369 else
12370 curr_insn_type = INSN_T1;
12371 }
12372 else
12373 {
12374 if (dp_op_sz)
12375 curr_insn_type = INSN_T1;
12376 else
12377 curr_insn_type = INSN_T2;
12378 }
12379 }
12380 /* Handle VSQRT. */
12381 else if (opc2 == 0x01 && opc3 == 0x03)
12382 {
12383 if (dp_op_sz)
12384 curr_insn_type = INSN_T1;
12385 else
12386 curr_insn_type = INSN_T2;
12387 }
12388 /* Handle VCVT. */
12389 else if (opc2 == 0x07 && opc3 == 0x03)
12390 {
12391 if (!dp_op_sz)
12392 curr_insn_type = INSN_T1;
12393 else
12394 curr_insn_type = INSN_T2;
12395 }
12396 else if (opc3 & 0x01)
12397 {
12398 /* Handle VCVT. */
12399 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12400 {
12401 if (!bit (arm_insn_r->arm_insn, 18))
12402 curr_insn_type = INSN_T2;
12403 else
12404 {
12405 if (dp_op_sz)
12406 curr_insn_type = INSN_T1;
12407 else
12408 curr_insn_type = INSN_T2;
12409 }
12410 }
12411 /* Handle VCVT. */
12412 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12413 {
12414 if (dp_op_sz)
12415 curr_insn_type = INSN_T1;
12416 else
12417 curr_insn_type = INSN_T2;
12418 }
12419 /* Handle VCVTB, VCVTT. */
12420 else if ((opc2 & 0x0e) == 0x02)
12421 curr_insn_type = INSN_T2;
12422 /* Handle VCMP, VCMPE. */
12423 else if ((opc2 & 0x0e) == 0x04)
12424 curr_insn_type = INSN_T3;
12425 }
12426 }
12427
12428 switch (curr_insn_type)
12429 {
12430 case INSN_T0:
12431 reg_vd = reg_vd | (bit_d << 4);
12432 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12433 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12434 arm_insn_r->reg_rec_count = 2;
12435 break;
12436
12437 case INSN_T1:
12438 reg_vd = reg_vd | (bit_d << 4);
12439 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12440 arm_insn_r->reg_rec_count = 1;
12441 break;
12442
12443 case INSN_T2:
12444 reg_vd = (reg_vd << 1) | bit_d;
12445 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12446 arm_insn_r->reg_rec_count = 1;
12447 break;
12448
12449 case INSN_T3:
12450 record_buf[0] = ARM_FPSCR_REGNUM;
12451 arm_insn_r->reg_rec_count = 1;
12452 break;
12453
12454 default:
12455 gdb_assert_not_reached ("no decoding pattern found");
12456 break;
12457 }
12458
12459 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12460 return 0;
12461 }
12462
12463 /* Handling opcode 110 insns. */
12464
12465 static int
12466 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
12467 {
12468 uint32_t op, op1, op1_sbit, op1_ebit, coproc;
12469
12470 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12471 op1 = bits (arm_insn_r->arm_insn, 20, 25);
12472 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12473
12474 if ((coproc & 0x0e) == 0x0a)
12475 {
12476 /* Handle extension register ld/st instructions. */
12477 if (!(op1 & 0x20))
12478 return arm_record_exreg_ld_st_insn (arm_insn_r);
12479
12480 /* 64-bit transfers between arm core and extension registers. */
12481 if ((op1 & 0x3e) == 0x04)
12482 return arm_record_exreg_ld_st_insn (arm_insn_r);
12483 }
12484 else
12485 {
12486 /* Handle coprocessor ld/st instructions. */
12487 if (!(op1 & 0x3a))
12488 {
12489 /* Store. */
12490 if (!op1_ebit)
12491 return arm_record_unsupported_insn (arm_insn_r);
12492 else
12493 /* Load. */
12494 return arm_record_unsupported_insn (arm_insn_r);
12495 }
12496
12497 /* Move to coprocessor from two arm core registers. */
12498 if (op1 == 0x4)
12499 return arm_record_unsupported_insn (arm_insn_r);
12500
12501 /* Move to two arm core registers from coprocessor. */
12502 if (op1 == 0x5)
12503 {
12504 uint32_t reg_t[2];
12505
12506 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
12507 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
12508 arm_insn_r->reg_rec_count = 2;
12509
12510 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
12511 return 0;
12512 }
12513 }
12514 return arm_record_unsupported_insn (arm_insn_r);
12515 }
12516
12517 /* Handling opcode 111 insns. */
12518
12519 static int
12520 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
12521 {
12522 uint32_t op, op1_sbit, op1_ebit, coproc;
12523 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
12524 struct regcache *reg_cache = arm_insn_r->regcache;
12525 ULONGEST u_regval = 0;
12526
12527 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12528 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12529 op1_sbit = bit (arm_insn_r->arm_insn, 24);
12530 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12531 op = bit (arm_insn_r->arm_insn, 4);
12532
12533 /* Handle arm SWI/SVC system call instructions. */
12534 if (op1_sbit)
12535 {
12536 if (tdep->arm_syscall_record != NULL)
12537 {
12538 ULONGEST svc_operand, svc_number;
12539
12540 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12541
12542 if (svc_operand) /* OABI. */
12543 svc_number = svc_operand - 0x900000;
12544 else /* EABI. */
12545 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12546
12547 return tdep->arm_syscall_record (reg_cache, svc_number);
12548 }
12549 else
12550 {
12551 printf_unfiltered (_("no syscall record support\n"));
12552 return -1;
12553 }
12554 }
12555
12556 if ((coproc & 0x0e) == 0x0a)
12557 {
12558 /* VFP data-processing instructions. */
12559 if (!op1_sbit && !op)
12560 return arm_record_vfp_data_proc_insn (arm_insn_r);
12561
12562 /* Advanced SIMD, VFP instructions. */
12563 if (!op1_sbit && op)
12564 return arm_record_vdata_transfer_insn (arm_insn_r);
12565 }
12566 else
12567 {
12568 /* Coprocessor data operations. */
12569 if (!op1_sbit && !op)
12570 return arm_record_unsupported_insn (arm_insn_r);
12571
12572 /* Move to Coprocessor from ARM core register. */
12573 if (!op1_sbit && !op1_ebit && op)
12574 return arm_record_unsupported_insn (arm_insn_r);
12575
12576 /* Move to arm core register from coprocessor. */
12577 if (!op1_sbit && op1_ebit && op)
12578 {
12579 uint32_t record_buf[1];
12580
12581 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12582 if (record_buf[0] == 15)
12583 record_buf[0] = ARM_PS_REGNUM;
12584
12585 arm_insn_r->reg_rec_count = 1;
12586 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
12587 record_buf);
12588 return 0;
12589 }
12590 }
12591
12592 return arm_record_unsupported_insn (arm_insn_r);
12593 }
12594
12595 /* Handling opcode 000 insns. */
12596
12597 static int
12598 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12599 {
12600 uint32_t record_buf[8];
12601 uint32_t reg_src1 = 0;
12602
12603 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12604
12605 record_buf[0] = ARM_PS_REGNUM;
12606 record_buf[1] = reg_src1;
12607 thumb_insn_r->reg_rec_count = 2;
12608
12609 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12610
12611 return 0;
12612 }
12613
12614
12615 /* Handling opcode 001 insns. */
12616
12617 static int
12618 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12619 {
12620 uint32_t record_buf[8];
12621 uint32_t reg_src1 = 0;
12622
12623 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12624
12625 record_buf[0] = ARM_PS_REGNUM;
12626 record_buf[1] = reg_src1;
12627 thumb_insn_r->reg_rec_count = 2;
12628
12629 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12630
12631 return 0;
12632 }
12633
12634 /* Handling opcode 010 insns. */
12635
12636 static int
12637 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12638 {
12639 struct regcache *reg_cache = thumb_insn_r->regcache;
12640 uint32_t record_buf[8], record_buf_mem[8];
12641
12642 uint32_t reg_src1 = 0, reg_src2 = 0;
12643 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12644
12645 ULONGEST u_regval[2] = {0};
12646
12647 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12648
12649 if (bit (thumb_insn_r->arm_insn, 12))
12650 {
12651 /* Handle load/store register offset. */
12652 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
12653 if (opcode2 >= 12 && opcode2 <= 15)
12654 {
12655 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12656 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
12657 record_buf[0] = reg_src1;
12658 thumb_insn_r->reg_rec_count = 1;
12659 }
12660 else if (opcode2 >= 8 && opcode2 <= 10)
12661 {
12662 /* STR(2), STRB(2), STRH(2) . */
12663 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12664 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12665 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12666 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12667 if (8 == opcode2)
12668 record_buf_mem[0] = 4; /* STR (2). */
12669 else if (10 == opcode2)
12670 record_buf_mem[0] = 1; /* STRB (2). */
12671 else if (9 == opcode2)
12672 record_buf_mem[0] = 2; /* STRH (2). */
12673 record_buf_mem[1] = u_regval[0] + u_regval[1];
12674 thumb_insn_r->mem_rec_count = 1;
12675 }
12676 }
12677 else if (bit (thumb_insn_r->arm_insn, 11))
12678 {
12679 /* Handle load from literal pool. */
12680 /* LDR(3). */
12681 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12682 record_buf[0] = reg_src1;
12683 thumb_insn_r->reg_rec_count = 1;
12684 }
12685 else if (opcode1)
12686 {
12687 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12688 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12689 if ((3 == opcode2) && (!opcode3))
12690 {
12691 /* Branch with exchange. */
12692 record_buf[0] = ARM_PS_REGNUM;
12693 thumb_insn_r->reg_rec_count = 1;
12694 }
12695 else
12696 {
12697 /* Format 8; special data processing insns. */
12698 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12699 record_buf[0] = ARM_PS_REGNUM;
12700 record_buf[1] = reg_src1;
12701 thumb_insn_r->reg_rec_count = 2;
12702 }
12703 }
12704 else
12705 {
12706 /* Format 5; data processing insns. */
12707 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12708 if (bit (thumb_insn_r->arm_insn, 7))
12709 {
12710 reg_src1 = reg_src1 + 8;
12711 }
12712 record_buf[0] = ARM_PS_REGNUM;
12713 record_buf[1] = reg_src1;
12714 thumb_insn_r->reg_rec_count = 2;
12715 }
12716
12717 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12718 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12719 record_buf_mem);
12720
12721 return 0;
12722 }
12723
12724 /* Handling opcode 001 insns. */
12725
12726 static int
12727 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12728 {
12729 struct regcache *reg_cache = thumb_insn_r->regcache;
12730 uint32_t record_buf[8], record_buf_mem[8];
12731
12732 uint32_t reg_src1 = 0;
12733 uint32_t opcode = 0, immed_5 = 0;
12734
12735 ULONGEST u_regval = 0;
12736
12737 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12738
12739 if (opcode)
12740 {
12741 /* LDR(1). */
12742 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12743 record_buf[0] = reg_src1;
12744 thumb_insn_r->reg_rec_count = 1;
12745 }
12746 else
12747 {
12748 /* STR(1). */
12749 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12750 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12751 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12752 record_buf_mem[0] = 4;
12753 record_buf_mem[1] = u_regval + (immed_5 * 4);
12754 thumb_insn_r->mem_rec_count = 1;
12755 }
12756
12757 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12758 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12759 record_buf_mem);
12760
12761 return 0;
12762 }
12763
12764 /* Handling opcode 100 insns. */
12765
12766 static int
12767 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12768 {
12769 struct regcache *reg_cache = thumb_insn_r->regcache;
12770 uint32_t record_buf[8], record_buf_mem[8];
12771
12772 uint32_t reg_src1 = 0;
12773 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12774
12775 ULONGEST u_regval = 0;
12776
12777 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12778
12779 if (3 == opcode)
12780 {
12781 /* LDR(4). */
12782 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12783 record_buf[0] = reg_src1;
12784 thumb_insn_r->reg_rec_count = 1;
12785 }
12786 else if (1 == opcode)
12787 {
12788 /* LDRH(1). */
12789 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12790 record_buf[0] = reg_src1;
12791 thumb_insn_r->reg_rec_count = 1;
12792 }
12793 else if (2 == opcode)
12794 {
12795 /* STR(3). */
12796 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12797 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12798 record_buf_mem[0] = 4;
12799 record_buf_mem[1] = u_regval + (immed_8 * 4);
12800 thumb_insn_r->mem_rec_count = 1;
12801 }
12802 else if (0 == opcode)
12803 {
12804 /* STRH(1). */
12805 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12806 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12807 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12808 record_buf_mem[0] = 2;
12809 record_buf_mem[1] = u_regval + (immed_5 * 2);
12810 thumb_insn_r->mem_rec_count = 1;
12811 }
12812
12813 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12814 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12815 record_buf_mem);
12816
12817 return 0;
12818 }
12819
12820 /* Handling opcode 101 insns. */
12821
12822 static int
12823 thumb_record_misc (insn_decode_record *thumb_insn_r)
12824 {
12825 struct regcache *reg_cache = thumb_insn_r->regcache;
12826
12827 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12828 uint32_t register_bits = 0, register_count = 0;
12829 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12830 uint32_t record_buf[24], record_buf_mem[48];
12831 uint32_t reg_src1;
12832
12833 ULONGEST u_regval = 0;
12834
12835 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12836 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12837 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12838
12839 if (14 == opcode2)
12840 {
12841 /* POP. */
12842 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12843 while (register_bits)
12844 {
12845 if (register_bits & 0x00000001)
12846 record_buf[index++] = register_count;
12847 register_bits = register_bits >> 1;
12848 register_count++;
12849 }
12850 record_buf[index++] = ARM_PS_REGNUM;
12851 record_buf[index++] = ARM_SP_REGNUM;
12852 thumb_insn_r->reg_rec_count = index;
12853 }
12854 else if (10 == opcode2)
12855 {
12856 /* PUSH. */
12857 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12858 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12859 while (register_bits)
12860 {
12861 if (register_bits & 0x00000001)
12862 register_count++;
12863 register_bits = register_bits >> 1;
12864 }
12865 start_address = u_regval - \
12866 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12867 thumb_insn_r->mem_rec_count = register_count;
12868 while (register_count)
12869 {
12870 record_buf_mem[(register_count * 2) - 1] = start_address;
12871 record_buf_mem[(register_count * 2) - 2] = 4;
12872 start_address = start_address + 4;
12873 register_count--;
12874 }
12875 record_buf[0] = ARM_SP_REGNUM;
12876 thumb_insn_r->reg_rec_count = 1;
12877 }
12878 else if (0x1E == opcode1)
12879 {
12880 /* BKPT insn. */
12881 /* Handle enhanced software breakpoint insn, BKPT. */
12882 /* CPSR is changed to be executed in ARM state, disabling normal
12883 interrupts, entering abort mode. */
12884 /* According to high vector configuration PC is set. */
12885 /* User hits breakpoint and type reverse, in that case, we need to go back with
12886 previous CPSR and Program Counter. */
12887 record_buf[0] = ARM_PS_REGNUM;
12888 record_buf[1] = ARM_LR_REGNUM;
12889 thumb_insn_r->reg_rec_count = 2;
12890 /* We need to save SPSR value, which is not yet done. */
12891 printf_unfiltered (_("Process record does not support instruction "
12892 "0x%0x at address %s.\n"),
12893 thumb_insn_r->arm_insn,
12894 paddress (thumb_insn_r->gdbarch,
12895 thumb_insn_r->this_addr));
12896 return -1;
12897 }
12898 else if ((0 == opcode) || (1 == opcode))
12899 {
12900 /* ADD(5), ADD(6). */
12901 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12902 record_buf[0] = reg_src1;
12903 thumb_insn_r->reg_rec_count = 1;
12904 }
12905 else if (2 == opcode)
12906 {
12907 /* ADD(7), SUB(4). */
12908 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12909 record_buf[0] = ARM_SP_REGNUM;
12910 thumb_insn_r->reg_rec_count = 1;
12911 }
12912
12913 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12914 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12915 record_buf_mem);
12916
12917 return 0;
12918 }
12919
12920 /* Handling opcode 110 insns. */
12921
12922 static int
12923 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12924 {
12925 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12926 struct regcache *reg_cache = thumb_insn_r->regcache;
12927
12928 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12929 uint32_t reg_src1 = 0;
12930 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12931 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12932 uint32_t record_buf[24], record_buf_mem[48];
12933
12934 ULONGEST u_regval = 0;
12935
12936 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12937 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12938
12939 if (1 == opcode2)
12940 {
12941
12942 /* LDMIA. */
12943 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12944 /* Get Rn. */
12945 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12946 while (register_bits)
12947 {
12948 if (register_bits & 0x00000001)
12949 record_buf[index++] = register_count;
12950 register_bits = register_bits >> 1;
12951 register_count++;
12952 }
12953 record_buf[index++] = reg_src1;
12954 thumb_insn_r->reg_rec_count = index;
12955 }
12956 else if (0 == opcode2)
12957 {
12958 /* It handles both STMIA. */
12959 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12960 /* Get Rn. */
12961 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12962 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12963 while (register_bits)
12964 {
12965 if (register_bits & 0x00000001)
12966 register_count++;
12967 register_bits = register_bits >> 1;
12968 }
12969 start_address = u_regval;
12970 thumb_insn_r->mem_rec_count = register_count;
12971 while (register_count)
12972 {
12973 record_buf_mem[(register_count * 2) - 1] = start_address;
12974 record_buf_mem[(register_count * 2) - 2] = 4;
12975 start_address = start_address + 4;
12976 register_count--;
12977 }
12978 }
12979 else if (0x1F == opcode1)
12980 {
12981 /* Handle arm syscall insn. */
12982 if (tdep->arm_syscall_record != NULL)
12983 {
12984 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12985 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12986 }
12987 else
12988 {
12989 printf_unfiltered (_("no syscall record support\n"));
12990 return -1;
12991 }
12992 }
12993
12994 /* B (1), conditional branch is automatically taken care in process_record,
12995 as PC is saved there. */
12996
12997 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12998 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12999 record_buf_mem);
13000
13001 return ret;
13002 }
13003
13004 /* Handling opcode 111 insns. */
13005
13006 static int
13007 thumb_record_branch (insn_decode_record *thumb_insn_r)
13008 {
13009 uint32_t record_buf[8];
13010 uint32_t bits_h = 0;
13011
13012 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
13013
13014 if (2 == bits_h || 3 == bits_h)
13015 {
13016 /* BL */
13017 record_buf[0] = ARM_LR_REGNUM;
13018 thumb_insn_r->reg_rec_count = 1;
13019 }
13020 else if (1 == bits_h)
13021 {
13022 /* BLX(1). */
13023 record_buf[0] = ARM_PS_REGNUM;
13024 record_buf[1] = ARM_LR_REGNUM;
13025 thumb_insn_r->reg_rec_count = 2;
13026 }
13027
13028 /* B(2) is automatically taken care in process_record, as PC is
13029 saved there. */
13030
13031 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13032
13033 return 0;
13034 }
13035
/* Handler for thumb2 load/store multiple instructions.
   Records the registers overwritten by LDM-family loads, or the memory
   words overwritten by STM-family stores, plus the (possibly updated)
   base register and CPSR.  */

static int
thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, op;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  op = bits (thumb2_insn_r->arm_insn, 23, 24);

  if (0 == op || 3 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
        {
          /* Handle RFE instruction.  */
          record_buf[0] = ARM_PS_REGNUM;
          thumb2_insn_r->reg_rec_count = 1;
        }
      else
        {
          /* Handle SRS instruction after reading banked SP.  */
          return arm_record_unsupported_insn (thumb2_insn_r);
        }
    }
  else if (1 == op || 2 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
        {
          /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions: every
             register in the list, the base Rn, and CPSR change.  */
          register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
          while (register_bits)
            {
              if (register_bits & 0x00000001)
                record_buf[index++] = register_count;

              register_count++;
              register_bits = register_bits >> 1;
            }
          record_buf[index++] = reg_rn;
          record_buf[index++] = ARM_PS_REGNUM;
          thumb2_insn_r->reg_rec_count = index;
        }
      else
        {
          /* Handle STM/STMIA/STMEA and STMDB/STMFD: record the stored
             words, the base Rn and CPSR.  */
          register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
          regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
          while (register_bits)
            {
              if (register_bits & 0x00000001)
                register_count++;

              register_bits = register_bits >> 1;
            }

          if (1 == op)
            {
              /* Start address calculation for STM/STMIA/STMEA
                 (increment-after: stores begin at the base value).  */
              start_address = u_regval;
            }
          else if (2 == op)
            {
              /* Start address calculation for STMDB/STMFD
                 (decrement-before: base minus the block size).  */
              start_address = u_regval - register_count * 4;
            }

          thumb2_insn_r->mem_rec_count = register_count;
          while (register_count)
            {
              record_buf_mem[register_count * 2 - 1] = start_address;
              record_buf_mem[register_count * 2 - 2] = 4;
              start_address = start_address + 4;
              register_count--;
            }
          record_buf[0] = reg_rn;
          record_buf[1] = ARM_PS_REGNUM;
          thumb2_insn_r->reg_rec_count = 2;
        }
    }

  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
             record_buf_mem);
  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
             record_buf);
  return ARM_RECORD_SUCCESS;
}
13129
13130 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13131 instructions. */
13132
13133 static int
13134 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
13135 {
13136 struct regcache *reg_cache = thumb2_insn_r->regcache;
13137
13138 uint32_t reg_rd, reg_rn, offset_imm;
13139 uint32_t reg_dest1, reg_dest2;
13140 uint32_t address, offset_addr;
13141 uint32_t record_buf[8], record_buf_mem[8];
13142 uint32_t op1, op2, op3;
13143 LONGEST s_word;
13144
13145 ULONGEST u_regval[2];
13146
13147 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13148 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13149 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
13150
13151 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13152 {
13153 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13154 {
13155 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13156 record_buf[0] = reg_dest1;
13157 record_buf[1] = ARM_PS_REGNUM;
13158 thumb2_insn_r->reg_rec_count = 2;
13159 }
13160
13161 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13162 {
13163 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13164 record_buf[2] = reg_dest2;
13165 thumb2_insn_r->reg_rec_count = 3;
13166 }
13167 }
13168 else
13169 {
13170 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13171 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13172
13173 if (0 == op1 && 0 == op2)
13174 {
13175 /* Handle STREX. */
13176 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13177 address = u_regval[0] + (offset_imm * 4);
13178 record_buf_mem[0] = 4;
13179 record_buf_mem[1] = address;
13180 thumb2_insn_r->mem_rec_count = 1;
13181 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13182 record_buf[0] = reg_rd;
13183 thumb2_insn_r->reg_rec_count = 1;
13184 }
13185 else if (1 == op1 && 0 == op2)
13186 {
13187 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13188 record_buf[0] = reg_rd;
13189 thumb2_insn_r->reg_rec_count = 1;
13190 address = u_regval[0];
13191 record_buf_mem[1] = address;
13192
13193 if (4 == op3)
13194 {
13195 /* Handle STREXB. */
13196 record_buf_mem[0] = 1;
13197 thumb2_insn_r->mem_rec_count = 1;
13198 }
13199 else if (5 == op3)
13200 {
13201 /* Handle STREXH. */
13202 record_buf_mem[0] = 2 ;
13203 thumb2_insn_r->mem_rec_count = 1;
13204 }
13205 else if (7 == op3)
13206 {
13207 /* Handle STREXD. */
13208 address = u_regval[0];
13209 record_buf_mem[0] = 4;
13210 record_buf_mem[2] = 4;
13211 record_buf_mem[3] = address + 4;
13212 thumb2_insn_r->mem_rec_count = 2;
13213 }
13214 }
13215 else
13216 {
13217 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13218
13219 if (bit (thumb2_insn_r->arm_insn, 24))
13220 {
13221 if (bit (thumb2_insn_r->arm_insn, 23))
13222 offset_addr = u_regval[0] + (offset_imm * 4);
13223 else
13224 offset_addr = u_regval[0] - (offset_imm * 4);
13225
13226 address = offset_addr;
13227 }
13228 else
13229 address = u_regval[0];
13230
13231 record_buf_mem[0] = 4;
13232 record_buf_mem[1] = address;
13233 record_buf_mem[2] = 4;
13234 record_buf_mem[3] = address + 4;
13235 thumb2_insn_r->mem_rec_count = 2;
13236 record_buf[0] = reg_rn;
13237 thumb2_insn_r->reg_rec_count = 1;
13238 }
13239 }
13240
13241 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13242 record_buf);
13243 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13244 record_buf_mem);
13245 return ARM_RECORD_SUCCESS;
13246 }
13247
13248 /* Handler for thumb2 data processing (shift register and modified immediate)
13249 instructions. */
13250
13251 static int
13252 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
13253 {
13254 uint32_t reg_rd, op;
13255 uint32_t record_buf[8];
13256
13257 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13258 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13259
13260 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13261 {
13262 record_buf[0] = ARM_PS_REGNUM;
13263 thumb2_insn_r->reg_rec_count = 1;
13264 }
13265 else
13266 {
13267 record_buf[0] = reg_rd;
13268 record_buf[1] = ARM_PS_REGNUM;
13269 thumb2_insn_r->reg_rec_count = 2;
13270 }
13271
13272 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13273 record_buf);
13274 return ARM_RECORD_SUCCESS;
13275 }
13276
13277 /* Generic handler for thumb2 instructions which effect destination and PS
13278 registers. */
13279
13280 static int
13281 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
13282 {
13283 uint32_t reg_rd;
13284 uint32_t record_buf[8];
13285
13286 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13287
13288 record_buf[0] = reg_rd;
13289 record_buf[1] = ARM_PS_REGNUM;
13290 thumb2_insn_r->reg_rec_count = 2;
13291
13292 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13293 record_buf);
13294 return ARM_RECORD_SUCCESS;
13295 }
13296
13297 /* Handler for thumb2 branch and miscellaneous control instructions. */
13298
13299 static int
13300 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
13301 {
13302 uint32_t op, op1, op2;
13303 uint32_t record_buf[8];
13304
13305 op = bits (thumb2_insn_r->arm_insn, 20, 26);
13306 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
13307 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13308
13309 /* Handle MSR insn. */
13310 if (!(op1 & 0x2) && 0x38 == op)
13311 {
13312 if (!(op2 & 0x3))
13313 {
13314 /* CPSR is going to be changed. */
13315 record_buf[0] = ARM_PS_REGNUM;
13316 thumb2_insn_r->reg_rec_count = 1;
13317 }
13318 else
13319 {
13320 arm_record_unsupported_insn(thumb2_insn_r);
13321 return -1;
13322 }
13323 }
13324 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
13325 {
13326 /* BLX. */
13327 record_buf[0] = ARM_PS_REGNUM;
13328 record_buf[1] = ARM_LR_REGNUM;
13329 thumb2_insn_r->reg_rec_count = 2;
13330 }
13331
13332 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13333 record_buf);
13334 return ARM_RECORD_SUCCESS;
13335 }
13336
/* Handler for thumb2 store single data item instructions.

   Records the memory span the store will overwrite (address and width
   derived from the encoding) plus the base register Rn, which the
   write-back forms may modify.  Returns ARM_RECORD_SUCCESS.  */

static int
thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2;

  ULONGEST u_regval[2];

  /* op1 (bits 21-23) encodes the access width/variant; op2 (bits 6-11)
     distinguishes register-offset from immediate forms.  */
  op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
  op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  /* u_regval[0] holds the current value of the base register Rn.  */
  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

  if (bit (thumb2_insn_r->arm_insn, 23))
    {
      /* T2 encoding: 12-bit unsigned immediate offset added to Rn.  */
      offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
      offset_addr = u_regval[0] + offset_imm;
      address = offset_addr;
    }
  else
    {
      /* T3 encoding.  */
      if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
        {
          /* Handle STRB (register): address = Rn + (Rm << imm2).  */
          reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
          regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
          shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
          offset_addr = u_regval[1] << shift_imm;
          address = u_regval[0] + offset_addr;
        }
      else
        {
          /* 8-bit immediate forms: bit 10 selects whether the offset is
             applied to the access address (pre-indexed) and bit 9 selects
             add vs subtract.  */
          offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
          if (bit (thumb2_insn_r->arm_insn, 10))
            {
              if (bit (thumb2_insn_r->arm_insn, 9))
                offset_addr = u_regval[0] + offset_imm;
              else
                offset_addr = u_regval[0] - offset_imm;

              address = offset_addr;
            }
          else
            /* Post-indexed: the store itself uses the unmodified base.  */
            address = u_regval[0];
        }
    }

  /* Width of the store in bytes, from the size/variant field.  */
  switch (op1)
    {
      /* Store byte instructions.  */
      case 4:
      case 0:
        record_buf_mem[0] = 1;
        break;
      /* Store half word instructions.  */
      case 1:
      case 5:
        record_buf_mem[0] = 2;
        break;
      /* Store word instructions.  */
      case 2:
      case 6:
        record_buf_mem[0] = 4;
        break;

      default:
        /* op1 values 3 and 7 are expected never to reach this handler.  */
        gdb_assert_not_reached ("no decoding pattern found");
        break;
    }

  /* One (length, address) memory record, and one register record for
     the base register.  */
  record_buf_mem[1] = address;
  thumb2_insn_r->mem_rec_count = 1;
  record_buf[0] = reg_rn;
  thumb2_insn_r->reg_rec_count = 1;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
             record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
             record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
13426
13427 /* Handler for thumb2 load memory hints instructions. */
13428
13429 static int
13430 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
13431 {
13432 uint32_t record_buf[8];
13433 uint32_t reg_rt, reg_rn;
13434
13435 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
13436 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13437
13438 if (ARM_PC_REGNUM != reg_rt)
13439 {
13440 record_buf[0] = reg_rt;
13441 record_buf[1] = reg_rn;
13442 record_buf[2] = ARM_PS_REGNUM;
13443 thumb2_insn_r->reg_rec_count = 3;
13444
13445 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13446 record_buf);
13447 return ARM_RECORD_SUCCESS;
13448 }
13449
13450 return ARM_RECORD_FAILURE;
13451 }
13452
13453 /* Handler for thumb2 load word instructions. */
13454
13455 static int
13456 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
13457 {
13458 uint32_t opcode1 = 0, opcode2 = 0;
13459 uint32_t record_buf[8];
13460
13461 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
13462 record_buf[1] = ARM_PS_REGNUM;
13463 thumb2_insn_r->reg_rec_count = 2;
13464
13465 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13466 record_buf);
13467 return ARM_RECORD_SUCCESS;
13468 }
13469
13470 /* Handler for thumb2 long multiply, long multiply accumulate, and
13471 divide instructions. */
13472
13473 static int
13474 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
13475 {
13476 uint32_t opcode1 = 0, opcode2 = 0;
13477 uint32_t record_buf[8];
13478 uint32_t reg_src1 = 0;
13479
13480 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
13481 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
13482
13483 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
13484 {
13485 /* Handle SMULL, UMULL, SMULAL. */
13486 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
13487 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13488 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13489 record_buf[2] = ARM_PS_REGNUM;
13490 thumb2_insn_r->reg_rec_count = 3;
13491 }
13492 else if (1 == opcode1 || 3 == opcode2)
13493 {
13494 /* Handle SDIV and UDIV. */
13495 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13496 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13497 record_buf[2] = ARM_PS_REGNUM;
13498 thumb2_insn_r->reg_rec_count = 3;
13499 }
13500 else
13501 return ARM_RECORD_FAILURE;
13502
13503 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13504 record_buf);
13505 return ARM_RECORD_SUCCESS;
13506 }
13507
13508 /* Record handler for thumb32 coprocessor instructions. */
13509
13510 static int
13511 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
13512 {
13513 if (bit (thumb2_insn_r->arm_insn, 25))
13514 return arm_record_coproc_data_proc (thumb2_insn_r);
13515 else
13516 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
13517 }
13518
13519 /* Record handler for advance SIMD structure load/store instructions. */
13520
13521 static int
13522 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
13523 {
13524 struct regcache *reg_cache = thumb2_insn_r->regcache;
13525 uint32_t l_bit, a_bit, b_bits;
13526 uint32_t record_buf[128], record_buf_mem[128];
13527 uint32_t reg_rn, reg_vd, address, f_esize, f_elem;
13528 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
13529 uint8_t f_ebytes;
13530
13531 l_bit = bit (thumb2_insn_r->arm_insn, 21);
13532 a_bit = bit (thumb2_insn_r->arm_insn, 23);
13533 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
13534 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13535 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
13536 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
13537 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
13538 f_esize = 8 * f_ebytes;
13539 f_elem = 8 / f_ebytes;
13540
13541 if (!l_bit)
13542 {
13543 ULONGEST u_regval = 0;
13544 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13545 address = u_regval;
13546
13547 if (!a_bit)
13548 {
13549 /* Handle VST1. */
13550 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13551 {
13552 if (b_bits == 0x07)
13553 bf_regs = 1;
13554 else if (b_bits == 0x0a)
13555 bf_regs = 2;
13556 else if (b_bits == 0x06)
13557 bf_regs = 3;
13558 else if (b_bits == 0x02)
13559 bf_regs = 4;
13560 else
13561 bf_regs = 0;
13562
13563 for (index_r = 0; index_r < bf_regs; index_r++)
13564 {
13565 for (index_e = 0; index_e < f_elem; index_e++)
13566 {
13567 record_buf_mem[index_m++] = f_ebytes;
13568 record_buf_mem[index_m++] = address;
13569 address = address + f_ebytes;
13570 thumb2_insn_r->mem_rec_count += 1;
13571 }
13572 }
13573 }
13574 /* Handle VST2. */
13575 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13576 {
13577 if (b_bits == 0x09 || b_bits == 0x08)
13578 bf_regs = 1;
13579 else if (b_bits == 0x03)
13580 bf_regs = 2;
13581 else
13582 bf_regs = 0;
13583
13584 for (index_r = 0; index_r < bf_regs; index_r++)
13585 for (index_e = 0; index_e < f_elem; index_e++)
13586 {
13587 for (loop_t = 0; loop_t < 2; loop_t++)
13588 {
13589 record_buf_mem[index_m++] = f_ebytes;
13590 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13591 thumb2_insn_r->mem_rec_count += 1;
13592 }
13593 address = address + (2 * f_ebytes);
13594 }
13595 }
13596 /* Handle VST3. */
13597 else if ((b_bits & 0x0e) == 0x04)
13598 {
13599 for (index_e = 0; index_e < f_elem; index_e++)
13600 {
13601 for (loop_t = 0; loop_t < 3; loop_t++)
13602 {
13603 record_buf_mem[index_m++] = f_ebytes;
13604 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13605 thumb2_insn_r->mem_rec_count += 1;
13606 }
13607 address = address + (3 * f_ebytes);
13608 }
13609 }
13610 /* Handle VST4. */
13611 else if (!(b_bits & 0x0e))
13612 {
13613 for (index_e = 0; index_e < f_elem; index_e++)
13614 {
13615 for (loop_t = 0; loop_t < 4; loop_t++)
13616 {
13617 record_buf_mem[index_m++] = f_ebytes;
13618 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13619 thumb2_insn_r->mem_rec_count += 1;
13620 }
13621 address = address + (4 * f_ebytes);
13622 }
13623 }
13624 }
13625 else
13626 {
13627 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
13628
13629 if (bft_size == 0x00)
13630 f_ebytes = 1;
13631 else if (bft_size == 0x01)
13632 f_ebytes = 2;
13633 else if (bft_size == 0x02)
13634 f_ebytes = 4;
13635 else
13636 f_ebytes = 0;
13637
13638 /* Handle VST1. */
13639 if (!(b_bits & 0x0b) || b_bits == 0x08)
13640 thumb2_insn_r->mem_rec_count = 1;
13641 /* Handle VST2. */
13642 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
13643 thumb2_insn_r->mem_rec_count = 2;
13644 /* Handle VST3. */
13645 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
13646 thumb2_insn_r->mem_rec_count = 3;
13647 /* Handle VST4. */
13648 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
13649 thumb2_insn_r->mem_rec_count = 4;
13650
13651 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
13652 {
13653 record_buf_mem[index_m] = f_ebytes;
13654 record_buf_mem[index_m] = address + (index_m * f_ebytes);
13655 }
13656 }
13657 }
13658 else
13659 {
13660 if (!a_bit)
13661 {
13662 /* Handle VLD1. */
13663 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13664 thumb2_insn_r->reg_rec_count = 1;
13665 /* Handle VLD2. */
13666 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13667 thumb2_insn_r->reg_rec_count = 2;
13668 /* Handle VLD3. */
13669 else if ((b_bits & 0x0e) == 0x04)
13670 thumb2_insn_r->reg_rec_count = 3;
13671 /* Handle VLD4. */
13672 else if (!(b_bits & 0x0e))
13673 thumb2_insn_r->reg_rec_count = 4;
13674 }
13675 else
13676 {
13677 /* Handle VLD1. */
13678 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
13679 thumb2_insn_r->reg_rec_count = 1;
13680 /* Handle VLD2. */
13681 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
13682 thumb2_insn_r->reg_rec_count = 2;
13683 /* Handle VLD3. */
13684 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
13685 thumb2_insn_r->reg_rec_count = 3;
13686 /* Handle VLD4. */
13687 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
13688 thumb2_insn_r->reg_rec_count = 4;
13689
13690 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
13691 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
13692 }
13693 }
13694
13695 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
13696 {
13697 record_buf[index_r] = reg_rn;
13698 thumb2_insn_r->reg_rec_count += 1;
13699 }
13700
13701 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13702 record_buf);
13703 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13704 record_buf_mem);
13705 return 0;
13706 }
13707
/* Decodes thumb2 instruction type and invokes its record handler.
   Returns the handler's result, or (unsigned) -1 when no decoding
   pattern matches.  */

static unsigned int
thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
{
  uint32_t op, op1, op2;

  /* Major decode fields of the 32-bit Thumb encoding.  The caller has
     already swapped the two halfwords, so the first halfword sits in
     bits 16-31.  NOTE(review): the return type is unsigned while the
     fall-through below returns -1; callers only compare the result
     against ARM_RECORD_SUCCESS, so the wrap-around is benign, but a
     signed return type would be clearer.  */
  op = bit (thumb2_insn_r->arm_insn, 15);
  op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 26);

  if (op1 == 0x01)
    {
      if (!(op2 & 0x64 ))
	{
	  /* Load/store multiple instruction.  */
	  return thumb2_record_ld_st_multiple (thumb2_insn_r);
	}
      else if (!((op2 & 0x64) ^ 0x04))
	{
	  /* Load/store (dual/exclusive) and table branch instruction.  */
	  return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
	}
      else if (!((op2 & 0x20) ^ 0x20))
	{
	  /* Data-processing (shifted register).  */
	  return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
	}
      else if (op2 & 0x40)
	{
	  /* Co-processor instructions.  */
	  return thumb2_record_coproc_insn (thumb2_insn_r);
	}
    }
  else if (op1 == 0x02)
    {
      if (op)
	{
	  /* Branches and miscellaneous control instructions.  */
	  return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
	}
      else if (op2 & 0x20)
	{
	  /* Data-processing (plain binary immediate) instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else
	{
	  /* Data-processing (modified immediate).  */
	  return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
	}
    }
  else if (op1 == 0x03)
    {
      if (!(op2 & 0x71 ))
	{
	  /* Store single data item.  */
	  return thumb2_record_str_single_data (thumb2_insn_r);
	}
      else if (!((op2 & 0x71) ^ 0x10))
	{
	  /* Advanced SIMD or structure load/store instructions.  */
	  return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x01))
	{
	  /* Load byte, memory hints instruction.  */
	  return thumb2_record_ld_mem_hints (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x03))
	{
	  /* Load halfword, memory hints instruction.  */
	  return thumb2_record_ld_mem_hints (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x05))
	{
	  /* Load word instruction.  */
	  return thumb2_record_ld_word (thumb2_insn_r);
	}
      else if (!((op2 & 0x70) ^ 0x20))
	{
	  /* Data-processing (register) instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else if (!((op2 & 0x78) ^ 0x30))
	{
	  /* Multiply, multiply accumulate, abs diff instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else if (!((op2 & 0x78) ^ 0x38))
	{
	  /* Long multiply, long multiply accumulate, and divide.  */
	  return thumb2_record_lmul_lmla_div (thumb2_insn_r);
	}
      else if (op2 & 0x40)
	{
	  /* Co-processor instructions.  */
	  return thumb2_record_coproc_insn (thumb2_insn_r);
	}
    }

  /* No decoding pattern matched.  */
  return -1;
}
13811
13812 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13813 and positive val on fauilure. */
13814
13815 static int
13816 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
13817 {
13818 gdb_byte buf[insn_size];
13819
13820 memset (&buf[0], 0, insn_size);
13821
13822 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
13823 return 1;
13824 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13825 insn_size,
13826 gdbarch_byte_order_for_code (insn_record->gdbarch));
13827 return 0;
13828 }
13829
/* Type of a record-handler function: decodes one instruction and fills
   in the record's register/memory lists.  */
typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13831
13832 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13833 dispatch it. */
13834
13835 static int
13836 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
13837 uint32_t insn_size)
13838 {
13839
13840 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13841 static const sti_arm_hdl_fp_t const arm_handle_insn[8] =
13842 {
13843 arm_record_data_proc_misc_ld_str, /* 000. */
13844 arm_record_data_proc_imm, /* 001. */
13845 arm_record_ld_st_imm_offset, /* 010. */
13846 arm_record_ld_st_reg_offset, /* 011. */
13847 arm_record_ld_st_multiple, /* 100. */
13848 arm_record_b_bl, /* 101. */
13849 arm_record_asimd_vfp_coproc, /* 110. */
13850 arm_record_coproc_data_proc /* 111. */
13851 };
13852
13853 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13854 static const sti_arm_hdl_fp_t const thumb_handle_insn[8] =
13855 { \
13856 thumb_record_shift_add_sub, /* 000. */
13857 thumb_record_add_sub_cmp_mov, /* 001. */
13858 thumb_record_ld_st_reg_offset, /* 010. */
13859 thumb_record_ld_st_imm_offset, /* 011. */
13860 thumb_record_ld_st_stack, /* 100. */
13861 thumb_record_misc, /* 101. */
13862 thumb_record_ldm_stm_swi, /* 110. */
13863 thumb_record_branch /* 111. */
13864 };
13865
13866 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13867 uint32_t insn_id = 0;
13868
13869 if (extract_arm_insn (arm_record, insn_size))
13870 {
13871 if (record_debug)
13872 {
13873 printf_unfiltered (_("Process record: error reading memory at "
13874 "addr %s len = %d.\n"),
13875 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
13876 }
13877 return -1;
13878 }
13879 else if (ARM_RECORD == record_type)
13880 {
13881 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13882 insn_id = bits (arm_record->arm_insn, 25, 27);
13883 ret = arm_record_extension_space (arm_record);
13884 /* If this insn has fallen into extension space
13885 then we need not decode it anymore. */
13886 if (ret != -1 && !INSN_RECORDED(arm_record))
13887 {
13888 ret = arm_handle_insn[insn_id] (arm_record);
13889 }
13890 }
13891 else if (THUMB_RECORD == record_type)
13892 {
13893 /* As thumb does not have condition codes, we set negative. */
13894 arm_record->cond = -1;
13895 insn_id = bits (arm_record->arm_insn, 13, 15);
13896 ret = thumb_handle_insn[insn_id] (arm_record);
13897 }
13898 else if (THUMB2_RECORD == record_type)
13899 {
13900 /* As thumb does not have condition codes, we set negative. */
13901 arm_record->cond = -1;
13902
13903 /* Swap first half of 32bit thumb instruction with second half. */
13904 arm_record->arm_insn
13905 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13906
13907 insn_id = thumb2_record_decode_insn_handler (arm_record);
13908
13909 if (insn_id != ARM_RECORD_SUCCESS)
13910 {
13911 arm_record_unsupported_insn (arm_record);
13912 ret = -1;
13913 }
13914 }
13915 else
13916 {
13917 /* Throw assertion. */
13918 gdb_assert_not_reached ("not a valid instruction, could not decode");
13919 }
13920
13921 return ret;
13922 }
13923
13924
/* Cleans up local record registers and memory allocations.  */

static void
deallocate_reg_mem (insn_decode_record *record)
{
  /* Presumably allocated by the REG_ALLOC/MEM_ALLOC macros; xfree of a
     NULL pointer is a no-op, so unset lists are safe here.  */
  xfree (record->arm_regs);
  xfree (record->arm_mems);
}
13933
13934
13935 /* Parse the current instruction and record the values of the registers and
13936 memory that will be changed in current instruction to record_arch_list".
13937 Return -1 if something is wrong. */
13938
13939 int
13940 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13941 CORE_ADDR insn_addr)
13942 {
13943
13944 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13945 uint32_t no_of_rec = 0;
13946 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13947 ULONGEST t_bit = 0, insn_id = 0;
13948
13949 ULONGEST u_regval = 0;
13950
13951 insn_decode_record arm_record;
13952
13953 memset (&arm_record, 0, sizeof (insn_decode_record));
13954 arm_record.regcache = regcache;
13955 arm_record.this_addr = insn_addr;
13956 arm_record.gdbarch = gdbarch;
13957
13958
13959 if (record_debug > 1)
13960 {
13961 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13962 "addr = %s\n",
13963 paddress (gdbarch, arm_record.this_addr));
13964 }
13965
13966 if (extract_arm_insn (&arm_record, 2))
13967 {
13968 if (record_debug)
13969 {
13970 printf_unfiltered (_("Process record: error reading memory at "
13971 "addr %s len = %d.\n"),
13972 paddress (arm_record.gdbarch,
13973 arm_record.this_addr), 2);
13974 }
13975 return -1;
13976 }
13977
13978 /* Check the insn, whether it is thumb or arm one. */
13979
13980 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13981 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13982
13983
13984 if (!(u_regval & t_bit))
13985 {
13986 /* We are decoding arm insn. */
13987 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13988 }
13989 else
13990 {
13991 insn_id = bits (arm_record.arm_insn, 11, 15);
13992 /* is it thumb2 insn? */
13993 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13994 {
13995 ret = decode_insn (&arm_record, THUMB2_RECORD,
13996 THUMB2_INSN_SIZE_BYTES);
13997 }
13998 else
13999 {
14000 /* We are decoding thumb insn. */
14001 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
14002 }
14003 }
14004
14005 if (0 == ret)
14006 {
14007 /* Record registers. */
14008 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
14009 if (arm_record.arm_regs)
14010 {
14011 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
14012 {
14013 if (record_full_arch_list_add_reg
14014 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
14015 ret = -1;
14016 }
14017 }
14018 /* Record memories. */
14019 if (arm_record.arm_mems)
14020 {
14021 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
14022 {
14023 if (record_full_arch_list_add_mem
14024 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
14025 arm_record.arm_mems[no_of_rec].len))
14026 ret = -1;
14027 }
14028 }
14029
14030 if (record_full_arch_list_add_end ())
14031 ret = -1;
14032 }
14033
14034
14035 deallocate_reg_mem (&arm_record);
14036
14037 return ret;
14038 }
14039
This page took 0.533963 seconds and 4 git commands to generate.