[ARM] Fix vcmp with #0.0
gdb/arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2014 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "regcache.h"
31 #include "reggroups.h"
32 #include "doublest.h"
33 #include "value.h"
34 #include "arch-utils.h"
35 #include "osabi.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
39 #include "objfiles.h"
40 #include "dwarf2-frame.h"
41 #include "gdbtypes.h"
42 #include "prologue-value.h"
43 #include "remote.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
46 #include "observer.h"
47
48 #include "arm-tdep.h"
49 #include "gdb/sim-arm.h"
50
51 #include "elf-bfd.h"
52 #include "coff/internal.h"
53 #include "elf/arm.h"
54
55 #include "vec.h"
56
57 #include "record.h"
58 #include "record-full.h"
59
60 #include "features/arm-with-m.c"
61 #include "features/arm-with-m-fpa-layout.c"
62 #include "features/arm-with-m-vfp-d16.c"
63 #include "features/arm-with-iwmmxt.c"
64 #include "features/arm-with-vfpv2.c"
65 #include "features/arm-with-vfpv3.c"
66 #include "features/arm-with-neon.c"
67
68 static int arm_debug;
69
70 /* Macros for setting and testing a bit in a minimal symbol that marks
71 it as a Thumb function. The MSB of the minimal symbol's "info" field
72 is used for this purpose.
73
74 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
75 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
76
77 #define MSYMBOL_SET_SPECIAL(msym) \
78 MSYMBOL_TARGET_FLAG_1 (msym) = 1
79
80 #define MSYMBOL_IS_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym)
82
83 /* Per-objfile data used for mapping symbols. */
84 static const struct objfile_data *arm_objfile_data_key;
85
86 struct arm_mapping_symbol
87 {
88 bfd_vma value;
89 char type;
90 };
91 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
92 DEF_VEC_O(arm_mapping_symbol_s);
93
94 struct arm_per_objfile
95 {
96 VEC(arm_mapping_symbol_s) **section_maps;
97 };
98
99 /* The list of available "set arm ..." and "show arm ..." commands. */
100 static struct cmd_list_element *setarmcmdlist = NULL;
101 static struct cmd_list_element *showarmcmdlist = NULL;
102
103 /* The type of floating-point to use. Keep this in sync with enum
104 arm_float_model, and the help string in _initialize_arm_tdep. */
105 static const char *const fp_model_strings[] =
106 {
107 "auto",
108 "softfpa",
109 "fpa",
110 "softvfp",
111 "vfp",
112 NULL
113 };
114
115 /* A variable that can be configured by the user. */
116 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
117 static const char *current_fp_model = "auto";
118
119 /* The ABI to use. Keep this in sync with arm_abi_kind. */
120 static const char *const arm_abi_strings[] =
121 {
122 "auto",
123 "APCS",
124 "AAPCS",
125 NULL
126 };
127
128 /* A variable that can be configured by the user. */
129 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
130 static const char *arm_abi_string = "auto";
131
132 /* The execution mode to assume. */
133 static const char *const arm_mode_strings[] =
134 {
135 "auto",
136 "arm",
137 "thumb",
138 NULL
139 };
140
141 static const char *arm_fallback_mode_string = "auto";
142 static const char *arm_force_mode_string = "auto";
143
144 /* Internal override of the execution mode. -1 means no override,
145 0 means override to ARM mode, 1 means override to Thumb mode.
146 The effect is the same as if arm_force_mode had been set by the
147 user (except that the internal override takes precedence over a user's
148 arm_force_mode override). */
149 static int arm_override_mode = -1;
150
151 /* Number of different reg name sets (options). */
152 static int num_disassembly_options;
153
154 /* The standard register names, and all the valid aliases for them. Note
155 that `fp', `sp' and `pc' are not added in this alias list, because they
156 have been added as builtin user registers in
157 std-regs.c:_initialize_frame_reg. */
158 static const struct
159 {
160 const char *name;
161 int regnum;
162 } arm_register_aliases[] = {
163 /* Basic register numbers. */
164 { "r0", 0 },
165 { "r1", 1 },
166 { "r2", 2 },
167 { "r3", 3 },
168 { "r4", 4 },
169 { "r5", 5 },
170 { "r6", 6 },
171 { "r7", 7 },
172 { "r8", 8 },
173 { "r9", 9 },
174 { "r10", 10 },
175 { "r11", 11 },
176 { "r12", 12 },
177 { "r13", 13 },
178 { "r14", 14 },
179 { "r15", 15 },
180 /* Synonyms (argument and variable registers). */
181 { "a1", 0 },
182 { "a2", 1 },
183 { "a3", 2 },
184 { "a4", 3 },
185 { "v1", 4 },
186 { "v2", 5 },
187 { "v3", 6 },
188 { "v4", 7 },
189 { "v5", 8 },
190 { "v6", 9 },
191 { "v7", 10 },
192 { "v8", 11 },
193 /* Other platform-specific names for r9. */
194 { "sb", 9 },
195 { "tr", 9 },
196 /* Special names. */
197 { "ip", 12 },
198 { "lr", 14 },
199 /* Names used by GCC (not listed in the ARM EABI). */
200 { "sl", 10 },
201 /* A special name from the older ATPCS. */
202 { "wr", 7 },
203 };
204
205 static const char *const arm_register_names[] =
206 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
207 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
208 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
209 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
210 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
211 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
212 "fps", "cpsr" }; /* 24 25 */
213
214 /* Valid register name styles. */
215 static const char **valid_disassembly_styles;
216
217 /* Disassembly style to use. Default to "std" register names. */
218 static const char *disassembly_style;
219
220 /* This is used to keep the bfd arch_info in sync with the disassembly
221 style. */
222 static void set_disassembly_style_sfunc(char *, int,
223 struct cmd_list_element *);
224 static void set_disassembly_style (void);
225
226 static void convert_from_extended (const struct floatformat *, const void *,
227 void *, int);
228 static void convert_to_extended (const struct floatformat *, void *,
229 const void *, int);
230
231 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
232 struct regcache *regcache,
233 int regnum, gdb_byte *buf);
234 static void arm_neon_quad_write (struct gdbarch *gdbarch,
235 struct regcache *regcache,
236 int regnum, const gdb_byte *buf);
237
238 static int thumb_insn_size (unsigned short inst1);
239
240 struct arm_prologue_cache
241 {
242 /* The stack pointer at the time this frame was created; i.e. the
243 caller's stack pointer when this function was called. It is used
244 to identify this frame. */
245 CORE_ADDR prev_sp;
246
247 /* The frame base for this frame is just prev_sp - frame size.
248 FRAMESIZE is the distance from the frame pointer to the
249 initial stack pointer. */
250
251 int framesize;
252
253 /* The register used to hold the frame pointer for this frame. */
254 int framereg;
255
256 /* Saved register offsets. */
257 struct trad_frame_saved_reg *saved_regs;
258 };
259
260 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
261 CORE_ADDR prologue_start,
262 CORE_ADDR prologue_end,
263 struct arm_prologue_cache *cache);
264
265 /* Architecture version for displaced stepping. This affects the behaviour of
266 certain instructions, and really should not be hard-wired. */
267
268 #define DISPLACED_STEPPING_ARCH_VERSION 5
269
270 /* Addresses for calling Thumb functions have bit 0 set.
271 Here are some macros to test, set, or clear bit 0 of addresses. */
272 #define IS_THUMB_ADDR(addr) ((addr) & 1)
273 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
274 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
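/* For example, MAKE_THUMB_ADDR (0x8000) yields 0x8001,
   UNMAKE_THUMB_ADDR (0x8001) yields 0x8000, and
   IS_THUMB_ADDR (0x8001) is true.  */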
275
276 /* Set to true if the 32-bit mode is in use. */
277
278 int arm_apcs_32 = 1;
279
280 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
281
282 int
283 arm_psr_thumb_bit (struct gdbarch *gdbarch)
284 {
285 if (gdbarch_tdep (gdbarch)->is_m)
286 return XPSR_T;
287 else
288 return CPSR_T;
289 }
290
291 /* Determine if FRAME is executing in Thumb mode. */
292
293 int
294 arm_frame_is_thumb (struct frame_info *frame)
295 {
296 CORE_ADDR cpsr;
297 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
298
299 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
300 directly (from a signal frame or dummy frame) or by interpreting
301 the saved LR (from a prologue or DWARF frame). So consult it and
302 trust the unwinders. */
303 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
304
305 return (cpsr & t_bit) != 0;
306 }
307
308 /* Callback for VEC_lower_bound. */
309
310 static inline int
311 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
312 const struct arm_mapping_symbol *rhs)
313 {
314 return lhs->value < rhs->value;
315 }
316
317 /* Search for the mapping symbol covering MEMADDR. If one is found,
318 return its type. Otherwise, return 0. If START is non-NULL,
319 set *START to the location of the mapping symbol. */
320
321 static char
322 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
323 {
324 struct obj_section *sec;
325
326 /* If there are mapping symbols, consult them. */
327 sec = find_pc_section (memaddr);
328 if (sec != NULL)
329 {
330 struct arm_per_objfile *data;
331 VEC(arm_mapping_symbol_s) *map;
332 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
333 0 };
334 unsigned int idx;
335
336 data = objfile_data (sec->objfile, arm_objfile_data_key);
337 if (data != NULL)
338 {
339 map = data->section_maps[sec->the_bfd_section->index];
340 if (!VEC_empty (arm_mapping_symbol_s, map))
341 {
342 struct arm_mapping_symbol *map_sym;
343
344 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
345 arm_compare_mapping_symbols);
346
347 /* VEC_lower_bound finds the earliest ordered insertion
348 point. If the following symbol starts at this exact
349 address, we use that; otherwise, the preceding
350 mapping symbol covers this address. */
351 if (idx < VEC_length (arm_mapping_symbol_s, map))
352 {
353 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
354 if (map_sym->value == map_key.value)
355 {
356 if (start)
357 *start = map_sym->value + obj_section_addr (sec);
358 return map_sym->type;
359 }
360 }
361
362 if (idx > 0)
363 {
364 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
365 if (start)
366 *start = map_sym->value + obj_section_addr (sec);
367 return map_sym->type;
368 }
369 }
370 }
371 }
372
373 return 0;
374 }
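/* Illustration of the lookup above, using a hypothetical map: suppose a
   section's mapping symbols are { 0x0 'a', 0x100 't', 0x180 'd' } and
   MEMADDR's section offset is 0x140.  VEC_lower_bound returns index 2;
   since map[2].value (0x180) is not equal to 0x140, the preceding symbol
   map[1] covers the address, so 't' (Thumb code) is returned and *START
   is set to the section address plus 0x100.  */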
375
376 /* Determine if the program counter specified in MEMADDR is in a Thumb
377 function. This function should be called for addresses unrelated to
378 any executing frame; otherwise, prefer arm_frame_is_thumb. */
379
380 int
381 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
382 {
383 struct bound_minimal_symbol sym;
384 char type;
385 struct displaced_step_closure* dsc
386 = get_displaced_step_closure_by_addr(memaddr);
387
388 /* If we are checking the mode of a displaced instruction in the copy area,
389 the mode should be determined by the instruction at the original address. */
390 if (dsc)
391 {
392 if (debug_displaced)
393 fprintf_unfiltered (gdb_stdlog,
394 "displaced: check mode of %.8lx instead of %.8lx\n",
395 (unsigned long) dsc->insn_addr,
396 (unsigned long) memaddr);
397 memaddr = dsc->insn_addr;
398 }
399
400 /* If bit 0 of the address is set, assume this is a Thumb address. */
401 if (IS_THUMB_ADDR (memaddr))
402 return 1;
403
404 /* Respect internal mode override if active. */
405 if (arm_override_mode != -1)
406 return arm_override_mode;
407
408 /* If the user wants to override the symbol table, let him. */
409 if (strcmp (arm_force_mode_string, "arm") == 0)
410 return 0;
411 if (strcmp (arm_force_mode_string, "thumb") == 0)
412 return 1;
413
414 /* ARM v6-M and v7-M are always in Thumb mode. */
415 if (gdbarch_tdep (gdbarch)->is_m)
416 return 1;
417
418 /* If there are mapping symbols, consult them. */
419 type = arm_find_mapping_symbol (memaddr, NULL);
420 if (type)
421 return type == 't';
422
423 /* Thumb functions have a "special" bit set in minimal symbols. */
424 sym = lookup_minimal_symbol_by_pc (memaddr);
425 if (sym.minsym)
426 return (MSYMBOL_IS_SPECIAL (sym.minsym));
427
428 /* If the user wants to override the fallback mode, let them. */
429 if (strcmp (arm_fallback_mode_string, "arm") == 0)
430 return 0;
431 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
432 return 1;
433
434 /* If we couldn't find any symbol, but we're talking to a running
435 target, then trust the current value of $cpsr. This lets
436 "display/i $pc" always show the correct mode (though if there is
437 a symbol table we will not reach here, so it still may not be
438 displayed in the mode it will be executed). */
439 if (target_has_registers)
440 return arm_frame_is_thumb (get_current_frame ());
441
442 /* Otherwise we're out of luck; we assume ARM. */
443 return 0;
444 }
445
446 /* Remove useless bits from addresses in a running program. */
447 static CORE_ADDR
448 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
449 {
450 /* On M-profile devices, do not strip the low bit from EXC_RETURN
451 (the magic exception return address). */
452 if (gdbarch_tdep (gdbarch)->is_m
453 && (val & 0xfffffff0) == 0xfffffff0)
454 return val;
455
456 if (arm_apcs_32)
457 return UNMAKE_THUMB_ADDR (val);
458 else
459 return (val & 0x03fffffc);
460 }
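/* For example, with arm_apcs_32 set, arm_addr_bits_remove maps 0x8001 to
   0x8000 (stripping the Thumb bit), while on an M-profile target an
   EXC_RETURN value such as 0xfffffffd is returned unchanged.  */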
461
462 /* Return 1 if PC is the start of a compiler helper function which
463 can be safely ignored during prologue skipping. IS_THUMB is true
464 if the function is known to be a Thumb function due to the way it
465 is being called. */
466 static int
467 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
468 {
469 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
470 struct bound_minimal_symbol msym;
471
472 msym = lookup_minimal_symbol_by_pc (pc);
473 if (msym.minsym != NULL
474 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
475 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
476 {
477 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
478
479 /* The GNU linker's Thumb call stub to foo is named
480 __foo_from_thumb. */
481 if (strstr (name, "_from_thumb") != NULL)
482 name += 2;
483
484 /* On soft-float targets, __truncdfsf2 is called to convert promoted
485 arguments to their argument types in non-prototyped
486 functions. */
487 if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
488 return 1;
489 if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
490 return 1;
491
492 /* Internal functions related to thread-local storage. */
493 if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
494 return 1;
495 if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
496 return 1;
497 }
498 else
499 {
500 /* If we run against a stripped glibc, we may be unable to identify
501 special functions by name. Check for one important case,
502 __aeabi_read_tp, by comparing the *code* against the default
503 implementation (this is hand-written ARM assembler in glibc). */
504
505 if (!is_thumb
506 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
507 == 0xe3e00a0f /* mov r0, #0xffff0fff */
508 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
509 == 0xe240f01f) /* sub pc, r0, #31 */
510 return 1;
511 }
512
513 return 0;
514 }
515
516 /* Support routines for instruction parsing. */
517 #define submask(x) ((1L << ((x) + 1)) - 1)
518 #define bit(obj,st) (((obj) >> (st)) & 1)
519 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
520 #define sbits(obj,st,fn) \
521 ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
522 #define BranchDest(addr,instr) \
523 ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
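/* Worked example of the macros above: for the instruction word 0xe92d4800
   (stmdb sp!, {fp, lr}), bits (0xe92d4800, 16, 19) extracts the base register
   field and yields 13 (sp).  For the branch-to-self instruction 0xeafffffe
   ("b ."), sbits (instr, 0, 23) sign-extends the offset field to -2, so
   BranchDest (addr, instr) evaluates to addr + 8 + (-2 << 2) == addr.  */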
524
525 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
526 the first 16 bits of the instruction, and INSN2 is the second 16 bits of
527 the instruction. */
528 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
529 ((bits ((insn1), 0, 3) << 12) \
530 | (bits ((insn1), 10, 10) << 11) \
531 | (bits ((insn2), 12, 14) << 8) \
532 | bits ((insn2), 0, 7))
533
534 /* Extract the immediate from a movw/movt instruction of encoding A. INSN is
535 the 32-bit instruction. */
536 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
537 ((bits ((insn), 16, 19) << 12) \
538 | bits ((insn), 0, 11))
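/* For instance, "movw r0, #0x1234" is encoded as 0xe3001234 in ARM state
   (encoding A) and as the halfword pair 0xf241, 0x2034 in Thumb state
   (encoding T); EXTRACT_MOVW_MOVT_IMM_A (0xe3001234) and
   EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x2034) both yield 0x1234.  */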
539
540 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
541
542 static unsigned int
543 thumb_expand_immediate (unsigned int imm)
544 {
545 unsigned int count = imm >> 7;
546
547 if (count < 8)
548 switch (count / 2)
549 {
550 case 0:
551 return imm & 0xff;
552 case 1:
553 return (imm & 0xff) | ((imm & 0xff) << 16);
554 case 2:
555 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
556 case 3:
557 return (imm & 0xff) | ((imm & 0xff) << 8)
558 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
559 }
560
561 return (0x80 | (imm & 0x7f)) << (32 - count);
562 }
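/* Examples of the expansion above: thumb_expand_immediate (0x0ab) yields
   0x000000ab (count 1, plain zero-extension); 0x155 yields 0x00550055
   (count 2, the byte replicated into both halfwords); 0x4ff yields
   0x7f800000 (count 9, the rotated form 0xff ROR 9).  */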
563
564 /* Return 1 if the 16-bit Thumb instruction INST might change
565 control flow, 0 otherwise. */
566
567 static int
568 thumb_instruction_changes_pc (unsigned short inst)
569 {
570 if ((inst & 0xff00) == 0xbd00) /* pop {rlist, pc} */
571 return 1;
572
573 if ((inst & 0xf000) == 0xd000) /* conditional branch */
574 return 1;
575
576 if ((inst & 0xf800) == 0xe000) /* unconditional branch */
577 return 1;
578
579 if ((inst & 0xff00) == 0x4700) /* bx REG, blx REG */
580 return 1;
581
582 if ((inst & 0xff87) == 0x4687) /* mov pc, REG */
583 return 1;
584
585 if ((inst & 0xf500) == 0xb100) /* CBNZ or CBZ. */
586 return 1;
587
588 return 0;
589 }
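/* For example, 0x4770 ("bx lr") matches the bx/blx pattern above and
   returns 1, while 0xb580 ("push {r7, lr}") matches none of the patterns
   and returns 0.  */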
590
591 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
592 might change control flow, 0 otherwise. */
593
594 static int
595 thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
596 {
597 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
598 {
599 /* Branches and miscellaneous control instructions. */
600
601 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
602 {
603 /* B, BL, BLX. */
604 return 1;
605 }
606 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
607 {
608 /* SUBS PC, LR, #imm8. */
609 return 1;
610 }
611 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
612 {
613 /* Conditional branch. */
614 return 1;
615 }
616
617 return 0;
618 }
619
620 if ((inst1 & 0xfe50) == 0xe810)
621 {
622 /* Load multiple or RFE. */
623
624 if (bit (inst1, 7) && !bit (inst1, 8))
625 {
626 /* LDMIA or POP */
627 if (bit (inst2, 15))
628 return 1;
629 }
630 else if (!bit (inst1, 7) && bit (inst1, 8))
631 {
632 /* LDMDB */
633 if (bit (inst2, 15))
634 return 1;
635 }
636 else if (bit (inst1, 7) && bit (inst1, 8))
637 {
638 /* RFEIA */
639 return 1;
640 }
641 else if (!bit (inst1, 7) && !bit (inst1, 8))
642 {
643 /* RFEDB */
644 return 1;
645 }
646
647 return 0;
648 }
649
650 if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
651 {
652 /* MOV PC or MOVS PC. */
653 return 1;
654 }
655
656 if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
657 {
658 /* LDR PC. */
659 if (bits (inst1, 0, 3) == 15)
660 return 1;
661 if (bit (inst1, 7))
662 return 1;
663 if (bit (inst2, 11))
664 return 1;
665 if ((inst2 & 0x0fc0) == 0x0000)
666 return 1;
667
668 return 0;
669 }
670
671 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
672 {
673 /* TBB. */
674 return 1;
675 }
676
677 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
678 {
679 /* TBH. */
680 return 1;
681 }
682
683 return 0;
684 }
685
686 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
687 epilogue, 0 otherwise. */
688
689 static int
690 thumb_instruction_restores_sp (unsigned short insn)
691 {
692 return (insn == 0x46bd /* mov sp, r7 */
693 || (insn & 0xff80) == 0xb000 /* add sp, imm */
694 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
695 }
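/* For example, 0xbc90 ("pop {r4, r7}") and 0xb002 ("add sp, #8") both
   return 1; note that "sub sp, #imm" (0xb080..0xb0ff) is excluded because
   bit 7 distinguishes it from "add sp, #imm".  */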
696
697 /* Analyze a Thumb prologue, looking for a recognizable stack frame
698 and frame pointer. Scan until we encounter a store that could
699 clobber the stack frame unexpectedly, or an unknown instruction.
700 Return the last address which is definitely safe to skip for an
701 initial breakpoint. */
702
703 static CORE_ADDR
704 thumb_analyze_prologue (struct gdbarch *gdbarch,
705 CORE_ADDR start, CORE_ADDR limit,
706 struct arm_prologue_cache *cache)
707 {
708 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
709 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
710 int i;
711 pv_t regs[16];
712 struct pv_area *stack;
713 struct cleanup *back_to;
714 CORE_ADDR offset;
715 CORE_ADDR unrecognized_pc = 0;
716
717 for (i = 0; i < 16; i++)
718 regs[i] = pv_register (i, 0);
719 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
720 back_to = make_cleanup_free_pv_area (stack);
721
722 while (start < limit)
723 {
724 unsigned short insn;
725
726 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
727
728 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
729 {
730 int regno;
731 int mask;
732
733 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
734 break;
735
736 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
737 whether to save LR (R14). */
738 mask = (insn & 0xff) | ((insn & 0x100) << 6);
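/* For example, "push {r4, r5, r7, lr}" (0xb5b0) yields mask 0x40b0,
   i.e. bits 4, 5, 7 and 14 set.  */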
739
740 /* Calculate offsets of saved R0-R7 and LR. */
741 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
742 if (mask & (1 << regno))
743 {
744 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
745 -4);
746 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
747 }
748 }
749 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
750 {
751 offset = (insn & 0x7f) << 2; /* get scaled offset */
752 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
753 -offset);
754 }
755 else if (thumb_instruction_restores_sp (insn))
756 {
757 /* Don't scan past the epilogue. */
758 break;
759 }
760 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
761 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
762 (insn & 0xff) << 2);
763 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
764 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
765 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
766 bits (insn, 6, 8));
767 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
768 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
769 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
770 bits (insn, 0, 7));
771 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
772 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
773 && pv_is_constant (regs[bits (insn, 3, 5)]))
774 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
775 regs[bits (insn, 6, 8)]);
776 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
777 && pv_is_constant (regs[bits (insn, 3, 6)]))
778 {
779 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
780 int rm = bits (insn, 3, 6);
781 regs[rd] = pv_add (regs[rd], regs[rm]);
782 }
783 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
784 {
785 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
786 int src_reg = (insn & 0x78) >> 3;
787 regs[dst_reg] = regs[src_reg];
788 }
789 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
790 {
791 /* Handle stores to the stack. Normally pushes are used,
792 but with GCC -mtpcs-frame, there may be other stores
793 in the prologue to create the frame. */
794 int regno = (insn >> 8) & 0x7;
795 pv_t addr;
796
797 offset = (insn & 0xff) << 2;
798 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
799
800 if (pv_area_store_would_trash (stack, addr))
801 break;
802
803 pv_area_store (stack, addr, 4, regs[regno]);
804 }
805 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
806 {
807 int rd = bits (insn, 0, 2);
808 int rn = bits (insn, 3, 5);
809 pv_t addr;
810
811 offset = bits (insn, 6, 10) << 2;
812 addr = pv_add_constant (regs[rn], offset);
813
814 if (pv_area_store_would_trash (stack, addr))
815 break;
816
817 pv_area_store (stack, addr, 4, regs[rd]);
818 }
819 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
820 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
821 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
822 /* Ignore stores of argument registers to the stack. */
823 ;
824 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
825 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
826 /* Ignore block loads from the stack, potentially copying
827 parameters from memory. */
828 ;
829 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
830 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
831 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
832 /* Similarly ignore single loads from the stack. */
833 ;
834 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
835 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
836 /* Skip register copies, i.e. saves to another register
837 instead of the stack. */
838 ;
839 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
840 /* Recognize constant loads; even with small stacks these are necessary
841 on Thumb. */
842 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
843 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
844 {
845 /* Constant pool loads, for the same reason. */
846 unsigned int constant;
847 CORE_ADDR loc;
848
849 loc = start + 4 + bits (insn, 0, 7) * 4;
850 constant = read_memory_unsigned_integer (loc, 4, byte_order);
851 regs[bits (insn, 8, 10)] = pv_constant (constant);
852 }
853 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
854 {
855 unsigned short inst2;
856
857 inst2 = read_memory_unsigned_integer (start + 2, 2,
858 byte_order_for_code);
859
860 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
861 {
862 /* BL, BLX. Allow some special function calls when
863 skipping the prologue; GCC generates these before
864 storing arguments to the stack. */
865 CORE_ADDR nextpc;
866 int j1, j2, imm1, imm2;
867
868 imm1 = sbits (insn, 0, 10);
869 imm2 = bits (inst2, 0, 10);
870 j1 = bit (inst2, 13);
871 j2 = bit (inst2, 11);
872
873 offset = ((imm1 << 12) + (imm2 << 1));
874 offset ^= ((!j2) << 22) | ((!j1) << 23);
875
876 nextpc = start + 4 + offset;
877 /* For BLX make sure to clear the low bits. */
878 if (bit (inst2, 12) == 0)
879 nextpc = nextpc & 0xfffffffc;
880
881 if (!skip_prologue_function (gdbarch, nextpc,
882 bit (inst2, 12) != 0))
883 break;
884 }
885
886 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
887 { registers } */
888 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
889 {
890 pv_t addr = regs[bits (insn, 0, 3)];
891 int regno;
892
893 if (pv_area_store_would_trash (stack, addr))
894 break;
895
896 /* Calculate offsets of saved registers. */
897 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
898 if (inst2 & (1 << regno))
899 {
900 addr = pv_add_constant (addr, -4);
901 pv_area_store (stack, addr, 4, regs[regno]);
902 }
903
904 if (insn & 0x0020)
905 regs[bits (insn, 0, 3)] = addr;
906 }
907
908 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
909 [Rn, #+/-imm]{!} */
910 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
911 {
912 int regno1 = bits (inst2, 12, 15);
913 int regno2 = bits (inst2, 8, 11);
914 pv_t addr = regs[bits (insn, 0, 3)];
915
916 offset = inst2 & 0xff;
917 if (insn & 0x0080)
918 addr = pv_add_constant (addr, offset);
919 else
920 addr = pv_add_constant (addr, -offset);
921
922 if (pv_area_store_would_trash (stack, addr))
923 break;
924
925 pv_area_store (stack, addr, 4, regs[regno1]);
926 pv_area_store (stack, pv_add_constant (addr, 4),
927 4, regs[regno2]);
928
929 if (insn & 0x0020)
930 regs[bits (insn, 0, 3)] = addr;
931 }
932
933 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
934 && (inst2 & 0x0c00) == 0x0c00
935 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
936 {
937 int regno = bits (inst2, 12, 15);
938 pv_t addr = regs[bits (insn, 0, 3)];
939
940 offset = inst2 & 0xff;
941 if (inst2 & 0x0200)
942 addr = pv_add_constant (addr, offset);
943 else
944 addr = pv_add_constant (addr, -offset);
945
946 if (pv_area_store_would_trash (stack, addr))
947 break;
948
949 pv_area_store (stack, addr, 4, regs[regno]);
950
951 if (inst2 & 0x0100)
952 regs[bits (insn, 0, 3)] = addr;
953 }
954
955 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
956 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
957 {
958 int regno = bits (inst2, 12, 15);
959 pv_t addr;
960
961 offset = inst2 & 0xfff;
962 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
963
964 if (pv_area_store_would_trash (stack, addr))
965 break;
966
967 pv_area_store (stack, addr, 4, regs[regno]);
968 }
969
970 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
971 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
972 /* Ignore stores of argument registers to the stack. */
973 ;
974
975 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
976 && (inst2 & 0x0d00) == 0x0c00
977 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
978 /* Ignore stores of argument registers to the stack. */
979 ;
980
981 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
982 { registers } */
983 && (inst2 & 0x8000) == 0x0000
984 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
985 /* Ignore block loads from the stack, potentially copying
986 parameters from memory. */
987 ;
988
989 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
990 [Rn, #+/-imm] */
991 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
992 /* Similarly ignore dual loads from the stack. */
993 ;
994
995 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
996 && (inst2 & 0x0d00) == 0x0c00
997 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
998 /* Similarly ignore single loads from the stack. */
999 ;
1000
1001 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1002 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1003 /* Similarly ignore single loads from the stack. */
1004 ;
1005
1006 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1007 && (inst2 & 0x8000) == 0x0000)
1008 {
1009 unsigned int imm = ((bits (insn, 10, 10) << 11)
1010 | (bits (inst2, 12, 14) << 8)
1011 | bits (inst2, 0, 7));
1012
1013 regs[bits (inst2, 8, 11)]
1014 = pv_add_constant (regs[bits (insn, 0, 3)],
1015 thumb_expand_immediate (imm));
1016 }
1017
1018 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1019 && (inst2 & 0x8000) == 0x0000)
1020 {
1021 unsigned int imm = ((bits (insn, 10, 10) << 11)
1022 | (bits (inst2, 12, 14) << 8)
1023 | bits (inst2, 0, 7));
1024
1025 regs[bits (inst2, 8, 11)]
1026 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1027 }
1028
1029 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1030 && (inst2 & 0x8000) == 0x0000)
1031 {
1032 unsigned int imm = ((bits (insn, 10, 10) << 11)
1033 | (bits (inst2, 12, 14) << 8)
1034 | bits (inst2, 0, 7));
1035
1036 regs[bits (inst2, 8, 11)]
1037 = pv_add_constant (regs[bits (insn, 0, 3)],
1038 - (CORE_ADDR) thumb_expand_immediate (imm));
1039 }
1040
1041 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1042 && (inst2 & 0x8000) == 0x0000)
1043 {
1044 unsigned int imm = ((bits (insn, 10, 10) << 11)
1045 | (bits (inst2, 12, 14) << 8)
1046 | bits (inst2, 0, 7));
1047
1048 regs[bits (inst2, 8, 11)]
1049 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1050 }
1051
1052 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1053 {
1054 unsigned int imm = ((bits (insn, 10, 10) << 11)
1055 | (bits (inst2, 12, 14) << 8)
1056 | bits (inst2, 0, 7));
1057
1058 regs[bits (inst2, 8, 11)]
1059 = pv_constant (thumb_expand_immediate (imm));
1060 }
1061
1062 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1063 {
1064 unsigned int imm
1065 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1066
1067 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1068 }
1069
1070 else if (insn == 0xea5f /* mov.w Rd,Rm */
1071 && (inst2 & 0xf0f0) == 0)
1072 {
1073 int dst_reg = (inst2 & 0x0f00) >> 8;
1074 int src_reg = inst2 & 0xf;
1075 regs[dst_reg] = regs[src_reg];
1076 }
1077
1078 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1079 {
1080 /* Constant pool loads. */
1081 unsigned int constant;
1082 CORE_ADDR loc;
1083
1084 offset = bits (inst2, 0, 11);
1085 if (insn & 0x0080)
1086 loc = start + 4 + offset;
1087 else
1088 loc = start + 4 - offset;
1089
1090 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1091 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1092 }
1093
1094 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1095 {
1096 /* Constant pool loads. */
1097 unsigned int constant;
1098 CORE_ADDR loc;
1099
1100 offset = bits (inst2, 0, 7) << 2;
1101 if (insn & 0x0080)
1102 loc = start + 4 + offset;
1103 else
1104 loc = start + 4 - offset;
1105
1106 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1107 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1108
1109 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1110 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1111 }
1112
1113 else if (thumb2_instruction_changes_pc (insn, inst2))
1114 {
1115 /* Don't scan past anything that might change control flow. */
1116 break;
1117 }
1118 else
1119 {
1120 /* The optimizer might shove anything into the prologue,
1121 so we just skip what we don't recognize. */
1122 unrecognized_pc = start;
1123 }
1124
1125 start += 2;
1126 }
1127 else if (thumb_instruction_changes_pc (insn))
1128 {
1129 /* Don't scan past anything that might change control flow. */
1130 break;
1131 }
1132 else
1133 {
1134 /* The optimizer might shove anything into the prologue,
1135 so we just skip what we don't recognize. */
1136 unrecognized_pc = start;
1137 }
1138
1139 start += 2;
1140 }
1141
1142 if (arm_debug)
1143 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1144 paddress (gdbarch, start));
1145
1146 if (unrecognized_pc == 0)
1147 unrecognized_pc = start;
1148
1149 if (cache == NULL)
1150 {
1151 do_cleanups (back_to);
1152 return unrecognized_pc;
1153 }
1154
1155 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1156 {
1157 /* Frame pointer is fp. Frame size is constant. */
1158 cache->framereg = ARM_FP_REGNUM;
1159 cache->framesize = -regs[ARM_FP_REGNUM].k;
1160 }
1161 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1162 {
1163 /* Frame pointer is r7. Frame size is constant. */
1164 cache->framereg = THUMB_FP_REGNUM;
1165 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1166 }
1167 else
1168 {
1169 /* Try the stack pointer... this is a bit desperate. */
1170 cache->framereg = ARM_SP_REGNUM;
1171 cache->framesize = -regs[ARM_SP_REGNUM].k;
1172 }
1173
1174 for (i = 0; i < 16; i++)
1175 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1176 cache->saved_regs[i].addr = offset;
1177
1178 do_cleanups (back_to);
1179 return unrecognized_pc;
1180 }
1181
1182
1183 /* Try to analyze the instructions starting at PC, which load the symbol
1184 __stack_chk_guard. Return the address of the __stack_chk_guard symbol
1185 loaded by these instructions, set the destination register number in
1186 *DESTREG, and set the size in bytes of the loading instructions in
1187 *OFFSET. Return 0 if the instructions are not recognized. */
1188
1189 static CORE_ADDR
1190 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1191 unsigned int *destreg, int *offset)
1192 {
1193 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1194 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1195 unsigned int low, high, address;
1196
1197 address = 0;
1198 if (is_thumb)
1199 {
1200 unsigned short insn1
1201 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1202
1203 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1204 {
1205 *destreg = bits (insn1, 8, 10);
1206 *offset = 2;
1207 address = bits (insn1, 0, 7);
1208 }
1209 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1210 {
1211 unsigned short insn2
1212 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1213
1214 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1215
1216 insn1
1217 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1218 insn2
1219 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1220
1221 /* movt Rd, #const */
1222 if ((insn1 & 0xfbc0) == 0xf2c0)
1223 {
1224 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1225 *destreg = bits (insn2, 8, 11);
1226 *offset = 8;
1227 address = (high << 16 | low);
1228 }
1229 }
1230 }
1231 else
1232 {
1233 unsigned int insn
1234 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1235
1236 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, #immed */
1237 {
1238 address = bits (insn, 0, 11);
1239 *destreg = bits (insn, 12, 15);
1240 *offset = 4;
1241 }
1242 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1243 {
1244 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1245
1246 insn
1247 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1248
1249 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1250 {
1251 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1252 *destreg = bits (insn, 12, 15);
1253 *offset = 8;
1254 address = (high << 16 | low);
1255 }
1256 }
1257 }
1258
1259 return address;
1260 }
1261
1262 /* Try to skip the sequence of instructions used for the stack protector.
1263 If PC points to the first instruction of this sequence, return the address
1264 of the first instruction after the sequence; otherwise, return the original PC.
1265
1266 On ARM, this sequence of instructions is mainly composed of three steps:
1267 Step 1: load the symbol __stack_chk_guard,
1268 Step 2: load from the address of __stack_chk_guard,
1269 Step 3: store it somewhere else.
1270
1271 The instructions in steps 2 and 3 are usually the same across ARM
1272 architectures: step 2 is a single 'ldr Rx, [Rn, #0]' instruction, and
1273 step 3 is a single 'str Rx, [r7, #immd]' instruction. However, the
1274 instructions in step 1 vary between ARM architectures. On ARMv7,
1275 they are:
1276
1277 movw Rn, #:lower16:__stack_chk_guard
1278 movt Rn, #:upper16:__stack_chk_guard
1279
1280 On ARMv5t, it is:
1281
1282 ldr Rn, .Label
1283 ....
1284 .Label:
1285 .word __stack_chk_guard
1286
1287 Since ldr/str is a very common instruction, we can't use it as the
1288 'fingerprint' or 'signature' of a stack protector sequence. Here we choose
1289 the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard,
1290 if not stripped, as the 'fingerprint' of a stack protector code sequence. */
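/* For illustration (not tied to any particular compiler output), a complete
   ARMv7 Thumb-2 sequence recognized by the code below could look like:

      movw r3, #:lower16:__stack_chk_guard
      movt r3, #:upper16:__stack_chk_guard
      ldr  r3, [r3, #0]
      str  r3, [r7, #8]

   where the register and the store offset shown are arbitrary.  */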
1291
1292 static CORE_ADDR
1293 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1294 {
1295 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1296 unsigned int basereg;
1297 struct bound_minimal_symbol stack_chk_guard;
1298 int offset;
1299 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1300 CORE_ADDR addr;
1301
1302 /* Try to parse the instructions in Step 1. */
1303 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1304 &basereg, &offset);
1305 if (!addr)
1306 return pc;
1307
1308 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1309 /* If the name of the symbol doesn't start with '__stack_chk_guard', this
1310 instruction sequence is not for a stack protector. If the symbol has
1311 been stripped, we conservatively assume the sequence is for a stack protector. */
1312 if (stack_chk_guard.minsym
1313 && strncmp (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
1314 "__stack_chk_guard",
1315 strlen ("__stack_chk_guard")) != 0)
1316 return pc;
1317
1318 if (is_thumb)
1319 {
1320 unsigned int destreg;
1321 unsigned short insn
1322 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1323
1324 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1325 if ((insn & 0xf800) != 0x6800)
1326 return pc;
1327 if (bits (insn, 3, 5) != basereg)
1328 return pc;
1329 destreg = bits (insn, 0, 2);
1330
1331 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1332 byte_order_for_code);
1333 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1334 if ((insn & 0xf800) != 0x6000)
1335 return pc;
1336 if (destreg != bits (insn, 0, 2))
1337 return pc;
1338 }
1339 else
1340 {
1341 unsigned int destreg;
1342 unsigned int insn
1343 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1344
1345 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1346 if ((insn & 0x0e500000) != 0x04100000)
1347 return pc;
1348 if (bits (insn, 16, 19) != basereg)
1349 return pc;
1350 destreg = bits (insn, 12, 15);
1351 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1352 insn = read_memory_unsigned_integer (pc + offset + 4,
1353 4, byte_order_for_code);
1354 if ((insn & 0x0e500000) != 0x04000000)
1355 return pc;
1356 if (bits (insn, 12, 15) != destreg)
1357 return pc;
1358 }
1359 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
1360 and 8 bytes on ARM. */
1361 if (is_thumb)
1362 return pc + offset + 4;
1363 else
1364 return pc + offset + 8;
1365 }
1366
1367 /* Advance the PC across any function entry prologue instructions to
1368 reach some "real" code.
1369
1370 The APCS (ARM Procedure Call Standard) defines the following
1371 prologue:
1372
1373 mov ip, sp
1374 [stmfd sp!, {a1,a2,a3,a4}]
1375 stmfd sp!, {...,fp,ip,lr,pc}
1376 [stfe f7, [sp, #-12]!]
1377 [stfe f6, [sp, #-12]!]
1378 [stfe f5, [sp, #-12]!]
1379 [stfe f4, [sp, #-12]!]
1380 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1381
1382 static CORE_ADDR
1383 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1384 {
1385 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1386 unsigned long inst;
1387 CORE_ADDR skip_pc;
1388 CORE_ADDR func_addr, limit_pc;
1389
1390 /* See if we can determine the end of the prologue via the symbol table.
1391 If so, then return either PC, or the PC after the prologue, whichever
1392 is greater. */
1393 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1394 {
1395 CORE_ADDR post_prologue_pc
1396 = skip_prologue_using_sal (gdbarch, func_addr);
1397 struct symtab *s = find_pc_symtab (func_addr);
1398
1399 if (post_prologue_pc)
1400 post_prologue_pc
1401 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1402
1403
1404 /* GCC always emits a line note before the prologue and another
1405 one after, even if the two are at the same address or on the
1406 same line. Take advantage of this so that we do not need to
1407 know every instruction that might appear in the prologue. We
1408 will have producer information for most binaries; if it is
1409 missing (e.g. for -gstabs), assume the GNU tools. */
1410 if (post_prologue_pc
1411 && (s == NULL
1412 || s->producer == NULL
1413 || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0
1414 || strncmp (s->producer, "clang ", sizeof ("clang ") - 1) == 0))
1415 return post_prologue_pc;
1416
1417 if (post_prologue_pc != 0)
1418 {
1419 CORE_ADDR analyzed_limit;
1420
1421 /* For non-GCC compilers, make sure the entire line is an
1422 acceptable prologue; GDB will round this function's
1423 return value up to the end of the following line so we
1424 can not skip just part of a line (and we do not want to).
1425
1426 RealView does not treat the prologue specially, but does
1427 associate prologue code with the opening brace; so this
1428 lets us skip the first line if we think it is the opening
1429 brace. */
1430 if (arm_pc_is_thumb (gdbarch, func_addr))
1431 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1432 post_prologue_pc, NULL);
1433 else
1434 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1435 post_prologue_pc, NULL);
1436
1437 if (analyzed_limit != post_prologue_pc)
1438 return func_addr;
1439
1440 return post_prologue_pc;
1441 }
1442 }
1443
1444 /* Can't determine prologue from the symbol table, need to examine
1445 instructions. */
1446
1447 /* Find an upper limit on the function prologue using the debug
1448 information. If the debug information could not be used to provide
1449 that bound, then use an arbitrarily large number as the upper bound. */
1450 /* Like arm_scan_prologue, stop no later than pc + 64. */
1451 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1452 if (limit_pc == 0)
1453 limit_pc = pc + 64; /* Magic. */
1454
1455
1456 /* Check if this is Thumb code. */
1457 if (arm_pc_is_thumb (gdbarch, pc))
1458 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1459
1460 for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
1461 {
1462 inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);
1463
1464 /* "mov ip, sp" is no longer a required part of the prologue. */
1465 if (inst == 0xe1a0c00d) /* mov ip, sp */
1466 continue;
1467
1468 if ((inst & 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
1469 continue;
1470
1471 if ((inst & 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
1472 continue;
1473
1474 /* Some prologues begin with "str lr, [sp, #-4]!". */
1475 if (inst == 0xe52de004) /* str lr, [sp, #-4]! */
1476 continue;
1477
1478 if ((inst & 0xfffffff0) == 0xe92d0000) /* stmfd sp!,{a1,a2,a3,a4} */
1479 continue;
1480
1481 if ((inst & 0xfffff800) == 0xe92dd800) /* stmfd sp!,{fp,ip,lr,pc} */
1482 continue;
1483
1484 /* Any insns after this point may float into the code, if it makes
1485 for better instruction scheduling, so we skip them only if we
1486 find them, but still consider the function to be frame-ful. */
1487
1488 /* We may have either one sfmfd instruction here, or several stfe
1489 insns, depending on the version of floating point code we
1490 support. */
1491 if ((inst & 0xffbf0fff) == 0xec2d0200) /* sfmfd fn, <cnt>, [sp]! */
1492 continue;
1493
1494 if ((inst & 0xffff8fff) == 0xed6d0103) /* stfe fn, [sp, #-12]! */
1495 continue;
1496
1497 if ((inst & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #nn */
1498 continue;
1499
1500 if ((inst & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #nn */
1501 continue;
1502
1503 if ((inst & 0xffffc000) == 0xe54b0000 /* strb r(0123),[r11,#-nn] */
1504 || (inst & 0xffffc0f0) == 0xe14b00b0 /* strh r(0123),[r11,#-nn] */
1505 || (inst & 0xffffc000) == 0xe50b0000) /* str r(0123),[r11,#-nn] */
1506 continue;
1507
1508 if ((inst & 0xffffc000) == 0xe5cd0000 /* strb r(0123),[sp,#nn] */
1509 || (inst & 0xffffc0f0) == 0xe1cd00b0 /* strh r(0123),[sp,#nn] */
1510 || (inst & 0xffffc000) == 0xe58d0000) /* str r(0123),[sp,#nn] */
1511 continue;
1512
1513 /* Unrecognized instruction; stop scanning. */
1514 break;
1515 }
1516
1517 return skip_pc; /* End of prologue. */
1518 }
1519
1520 /* *INDENT-OFF* */
1521 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1522 This function decodes a Thumb function prologue to determine:
1523 1) the size of the stack frame
1524 2) which registers are saved on it
1525 3) the offsets of saved regs
1526 4) the offset from the stack pointer to the frame pointer
1527
1528 A typical Thumb function prologue would create this stack frame
1529 (offsets relative to FP)
1530 old SP -> 24 stack parameters
1531 20 LR
1532 16 R7
1533 R7 -> 0 local variables (16 bytes)
1534 SP -> -12 additional stack space (12 bytes)
1535 The frame size would thus be 36 bytes, and the frame offset would be
1536 12 bytes. The frame register is R7.
1537
1538 The comments for thumb_analyze_prologue() describe the algorithm we use
1539 to detect the end of the prologue. */
1540 /* *INDENT-ON* */
1541
1542 static void
1543 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1544 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1545 {
1546 CORE_ADDR prologue_start;
1547 CORE_ADDR prologue_end;
1548
1549 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1550 &prologue_end))
1551 {
1552 /* See comment in arm_scan_prologue for an explanation of
1553 this heuristic. */
1554 if (prologue_end > prologue_start + 64)
1555 {
1556 prologue_end = prologue_start + 64;
1557 }
1558 }
1559 else
1560 /* We're in the boondocks: we have no idea where the start of the
1561 function is. */
1562 return;
1563
1564 prologue_end = min (prologue_end, prev_pc);
1565
1566 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1567 }
1568
1569 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1570
1571 static int
1572 arm_instruction_changes_pc (uint32_t this_instr)
1573 {
1574 if (bits (this_instr, 28, 31) == INST_NV)
1575 /* Unconditional instructions. */
1576 switch (bits (this_instr, 24, 27))
1577 {
1578 case 0xa:
1579 case 0xb:
1580 /* Branch with Link and change to Thumb. */
1581 return 1;
1582 case 0xc:
1583 case 0xd:
1584 case 0xe:
1585 /* Coprocessor register transfer. */
1586 if (bits (this_instr, 12, 15) == 15)
1587 error (_("Invalid update to pc in instruction"));
1588 return 0;
1589 default:
1590 return 0;
1591 }
1592 else
1593 switch (bits (this_instr, 25, 27))
1594 {
1595 case 0x0:
1596 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1597 {
1598 /* Multiplies and extra load/stores. */
1599 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1600 /* Neither multiplies nor extension load/stores are allowed
1601 to modify PC. */
1602 return 0;
1603
1604 /* Otherwise, miscellaneous instructions. */
1605
1606 /* BX <reg>, BXJ <reg>, BLX <reg> */
1607 if (bits (this_instr, 4, 27) == 0x12fff1
1608 || bits (this_instr, 4, 27) == 0x12fff2
1609 || bits (this_instr, 4, 27) == 0x12fff3)
1610 return 1;
1611
1612 /* Other miscellaneous instructions are unpredictable if they
1613 modify PC. */
1614 return 0;
1615 }
1616 /* Data processing instruction. Fall through. */
1617
1618 case 0x1:
1619 if (bits (this_instr, 12, 15) == 15)
1620 return 1;
1621 else
1622 return 0;
1623
1624 case 0x2:
1625 case 0x3:
1626 /* Media instructions and architecturally undefined instructions. */
1627 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1628 return 0;
1629
1630 /* Stores. */
1631 if (bit (this_instr, 20) == 0)
1632 return 0;
1633
1634 /* Loads. */
1635 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1636 return 1;
1637 else
1638 return 0;
1639
1640 case 0x4:
1641 /* Load/store multiple. */
1642 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1643 return 1;
1644 else
1645 return 0;
1646
1647 case 0x5:
1648 /* Branch and branch with link. */
1649 return 1;
1650
1651 case 0x6:
1652 case 0x7:
1653 /* Coprocessor transfers or SWIs can not affect PC. */
1654 return 0;
1655
1656 default:
1657 internal_error (__FILE__, __LINE__, _("bad value in switch"));
1658 }
1659 }
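/* A few concrete cases of the classification above: 0xe1a00001
   ("mov r0, r1") writes r0, not the PC, so it returns 0; 0xe12fff1e
   ("bx lr") and 0xe593f000 ("ldr pc, [r3]") both return 1.  */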
1660
1661 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1662 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1663 fill it in. Return the first address not recognized as a prologue
1664 instruction.
1665
1666 We recognize all the instructions typically found in ARM prologues,
1667 plus harmless instructions which can be skipped (either for analysis
1668 purposes, or a more restrictive set that can be skipped when finding
1669 the end of the prologue). */
1670
1671 static CORE_ADDR
1672 arm_analyze_prologue (struct gdbarch *gdbarch,
1673 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1674 struct arm_prologue_cache *cache)
1675 {
1676 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1677 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1678 int regno;
1679 CORE_ADDR offset, current_pc;
1680 pv_t regs[ARM_FPS_REGNUM];
1681 struct pv_area *stack;
1682 struct cleanup *back_to;
1683 int framereg, framesize;
1684 CORE_ADDR unrecognized_pc = 0;
1685
1686 /* Search the prologue looking for instructions that set up the
1687 frame pointer, adjust the stack pointer, and save registers.
1688
1689 Be careful, however, and if it doesn't look like a prologue,
1690 don't try to scan it. If, for instance, a frameless function
1691 begins with stmfd sp!, then we will tell ourselves there is
1692 a frame, which will confuse stack traceback, as well as "finish"
1693 and other operations that rely on a knowledge of the stack
1694 traceback. */
1695
1696 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1697 regs[regno] = pv_register (regno, 0);
1698 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1699 back_to = make_cleanup_free_pv_area (stack);
1700
1701 for (current_pc = prologue_start;
1702 current_pc < prologue_end;
1703 current_pc += 4)
1704 {
1705 unsigned int insn
1706 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1707
1708 if (insn == 0xe1a0c00d) /* mov ip, sp */
1709 {
1710 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1711 continue;
1712 }
1713 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1714 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1715 {
1716 unsigned imm = insn & 0xff; /* immediate value */
1717 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1718 int rd = bits (insn, 12, 15);
1719 imm = (imm >> rot) | (imm << (32 - rot));
1720 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1721 continue;
1722 }
1723 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1724 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1725 {
1726 unsigned imm = insn & 0xff; /* immediate value */
1727 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1728 int rd = bits (insn, 12, 15);
1729 imm = (imm >> rot) | (imm << (32 - rot));
1730 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1731 continue;
1732 }
1733 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1734 [sp, #-4]! */
1735 {
1736 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1737 break;
1738 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1739 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1740 regs[bits (insn, 12, 15)]);
1741 continue;
1742 }
1743 else if ((insn & 0xffff0000) == 0xe92d0000)
1744 /* stmfd sp!, {..., fp, ip, lr, pc}
1745 or
1746 stmfd sp!, {a1, a2, a3, a4} */
1747 {
1748 int mask = insn & 0xffff;
1749
1750 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1751 break;
1752
1753 /* Calculate offsets of saved registers. */
1754 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1755 if (mask & (1 << regno))
1756 {
1757 regs[ARM_SP_REGNUM]
1758 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1759 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1760 }
1761 }
1762 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1763 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1764 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1765 {
1766 /* No need to add this to saved_regs -- it's just an arg reg. */
1767 continue;
1768 }
1769 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1770 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1771 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1772 {
1773 /* No need to add this to saved_regs -- it's just an arg reg. */
1774 continue;
1775 }
1776 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1777 { registers } */
1778 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1779 {
1780 /* No need to add this to saved_regs -- it's just arg regs. */
1781 continue;
1782 }
1783 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1784 {
1785 unsigned imm = insn & 0xff; /* immediate value */
1786 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1787 imm = (imm >> rot) | (imm << (32 - rot));
1788 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1789 }
1790 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1791 {
1792 unsigned imm = insn & 0xff; /* immediate value */
1793 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1794 imm = (imm >> rot) | (imm << (32 - rot));
1795 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1796 }
1797 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1798 [sp, -#c]! */
1799 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1800 {
1801 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1802 break;
1803
1804 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1805 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1806 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1807 }
1808 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1809 [sp]! */
1810 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1811 {
1812 int n_saved_fp_regs;
1813 unsigned int fp_start_reg, fp_bound_reg;
1814
1815 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1816 break;
1817
1818 if ((insn & 0x800) == 0x800) /* N0 is set */
1819 {
1820 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1821 n_saved_fp_regs = 3;
1822 else
1823 n_saved_fp_regs = 1;
1824 }
1825 else
1826 {
1827 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1828 n_saved_fp_regs = 2;
1829 else
1830 n_saved_fp_regs = 4;
1831 }
1832
1833 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1834 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1835 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1836 {
1837 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1838 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1839 regs[fp_start_reg]);
1840 }
1841 }
1842 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1843 {
1844 /* Allow some special function calls when skipping the
1845 prologue; GCC generates these before storing arguments to
1846 the stack. */
1847 CORE_ADDR dest = BranchDest (current_pc, insn);
1848
1849 if (skip_prologue_function (gdbarch, dest, 0))
1850 continue;
1851 else
1852 break;
1853 }
1854 else if ((insn & 0xf0000000) != 0xe0000000)
1855 break; /* Condition not true, exit early. */
1856 else if (arm_instruction_changes_pc (insn))
1857 /* Don't scan past anything that might change control flow. */
1858 break;
1859 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1860 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1861 /* Ignore block loads from the stack, potentially copying
1862 parameters from memory. */
1863 continue;
1864 else if ((insn & 0xfc500000) == 0xe4100000
1865 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1866 /* Similarly ignore single loads from the stack. */
1867 continue;
1868 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1869 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1870 register instead of the stack. */
1871 continue;
1872 else
1873 {
1874 /* The optimizer might shove anything into the prologue,
1875 so we just skip what we don't recognize. */
1876 unrecognized_pc = current_pc;
1877 continue;
1878 }
1879 }
1880
1881 if (unrecognized_pc == 0)
1882 unrecognized_pc = current_pc;
1883
1884 /* The frame size is just the distance from the frame register
1885 to the original stack pointer. */
1886 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1887 {
1888 /* Frame pointer is fp. */
1889 framereg = ARM_FP_REGNUM;
1890 framesize = -regs[ARM_FP_REGNUM].k;
1891 }
1892 else
1893 {
1894 /* Try the stack pointer... this is a bit desperate. */
1895 framereg = ARM_SP_REGNUM;
1896 framesize = -regs[ARM_SP_REGNUM].k;
1897 }
1898
1899 if (cache)
1900 {
1901 cache->framereg = framereg;
1902 cache->framesize = framesize;
1903
1904 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1905 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1906 cache->saved_regs[regno].addr = offset;
1907 }
1908
1909 if (arm_debug)
1910 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1911 paddress (gdbarch, unrecognized_pc));
1912
1913 do_cleanups (back_to);
1914 return unrecognized_pc;
1915 }
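/* As a rough illustration of what the scanner above recognizes, a
   classic APCS-style GCC prologue looks something like:

       mov   ip, sp
       stmfd sp!, {fp, ip, lr, pc}
       sub   fp, ip, #4
       sub   sp, sp, #16

   After these four instructions the pv tracking leaves FP at the entry
   SP minus 4 and SP at the entry SP minus 32, so the code above reports
   framereg == ARM_FP_REGNUM with a framesize of 4, and the stmfd stores
   provide the saved-register offsets recorded in CACHE->saved_regs.
   The exact register mask and immediates depend on the compiler and
   options used.  */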
1916
1917 static void
1918 arm_scan_prologue (struct frame_info *this_frame,
1919 struct arm_prologue_cache *cache)
1920 {
1921 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1922 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1923 int regno;
1924 CORE_ADDR prologue_start, prologue_end, current_pc;
1925 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1926 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1927 pv_t regs[ARM_FPS_REGNUM];
1928 struct pv_area *stack;
1929 struct cleanup *back_to;
1930 CORE_ADDR offset;
1931
1932 /* Assume there is no frame until proven otherwise. */
1933 cache->framereg = ARM_SP_REGNUM;
1934 cache->framesize = 0;
1935
1936 /* Check for Thumb prologue. */
1937 if (arm_frame_is_thumb (this_frame))
1938 {
1939 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1940 return;
1941 }
1942
1943 /* Find the function prologue. If we can't find the function in
1944 the symbol table, peek in the stack frame to find the PC. */
1945 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1946 &prologue_end))
1947 {
1948 /* One way to find the end of the prologue (which works well
1949 for unoptimized code) is to do the following:
1950
1951 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1952
1953 if (sal.line == 0)
1954 prologue_end = prev_pc;
1955 else if (sal.end < prologue_end)
1956 prologue_end = sal.end;
1957
1958 This mechanism is very accurate so long as the optimizer
1959 doesn't move any instructions from the function body into the
1960 prologue. If this happens, sal.end will be the last
1961 instruction in the first hunk of prologue code just before
1962 the first instruction that the scheduler has moved from
1963 the body to the prologue.
1964
1965 In order to make sure that we scan all of the prologue
1966 instructions, we use a slightly less accurate mechanism which
1967 may scan more than necessary. To help compensate for this
1968 lack of accuracy, the prologue scanning loop below contains
1969 several clauses which'll cause the loop to terminate early if
1970 an implausible prologue instruction is encountered.
1971
1972 The expression
1973
1974 prologue_start + 64
1975
1976 is a suitable endpoint since it accounts for the largest
1977 possible prologue plus up to five instructions inserted by
1978 the scheduler. */
1979
1980 if (prologue_end > prologue_start + 64)
1981 {
1982 prologue_end = prologue_start + 64; /* See above. */
1983 }
1984 }
1985 else
1986 {
1987 /* We have no symbol information. Our only option is to assume this
1988 function has a standard stack frame and the normal frame register.
1989 Then, we can find the value of our frame pointer on entrance to
1990 the callee (or at the present moment if this is the innermost frame).
1991 The value stored there should be the address of the stmfd + 8. */
1992 CORE_ADDR frame_loc;
1993 LONGEST return_value;
1994
1995 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1996 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1997 return;
1998 else
1999 {
2000 prologue_start = gdbarch_addr_bits_remove
2001 (gdbarch, return_value) - 8;
2002 prologue_end = prologue_start + 64; /* See above. */
2003 }
2004 }
2005
2006 if (prev_pc < prologue_end)
2007 prologue_end = prev_pc;
2008
2009 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
2010 }
2011
2012 static struct arm_prologue_cache *
2013 arm_make_prologue_cache (struct frame_info *this_frame)
2014 {
2015 int reg;
2016 struct arm_prologue_cache *cache;
2017 CORE_ADDR unwound_fp;
2018
2019 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2020 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2021
2022 arm_scan_prologue (this_frame, cache);
2023
2024 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2025 if (unwound_fp == 0)
2026 return cache;
2027
2028 cache->prev_sp = unwound_fp + cache->framesize;
2029
2030 /* Calculate actual addresses of saved registers using offsets
2031 determined by arm_scan_prologue. */
2032 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2033 if (trad_frame_addr_p (cache->saved_regs, reg))
2034 cache->saved_regs[reg].addr += cache->prev_sp;
2035
2036 return cache;
2037 }
2038
2039 /* Our frame ID for a normal frame is the current function's starting PC
2040 and the caller's SP when we were called. */
2041
2042 static void
2043 arm_prologue_this_id (struct frame_info *this_frame,
2044 void **this_cache,
2045 struct frame_id *this_id)
2046 {
2047 struct arm_prologue_cache *cache;
2048 struct frame_id id;
2049 CORE_ADDR pc, func;
2050
2051 if (*this_cache == NULL)
2052 *this_cache = arm_make_prologue_cache (this_frame);
2053 cache = *this_cache;
2054
2055 /* This is meant to halt the backtrace at "_start". */
2056 pc = get_frame_pc (this_frame);
2057 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2058 return;
2059
2060 /* If we've hit a wall, stop. */
2061 if (cache->prev_sp == 0)
2062 return;
2063
2064 /* Use function start address as part of the frame ID. If we cannot
2065 identify the start address (due to missing symbol information),
2066 fall back to just using the current PC. */
2067 func = get_frame_func (this_frame);
2068 if (!func)
2069 func = pc;
2070
2071 id = frame_id_build (cache->prev_sp, func);
2072 *this_id = id;
2073 }
2074
2075 static struct value *
2076 arm_prologue_prev_register (struct frame_info *this_frame,
2077 void **this_cache,
2078 int prev_regnum)
2079 {
2080 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2081 struct arm_prologue_cache *cache;
2082
2083 if (*this_cache == NULL)
2084 *this_cache = arm_make_prologue_cache (this_frame);
2085 cache = *this_cache;
2086
2087 /* If we are asked to unwind the PC, then we need to return the LR
2088 instead. The prologue may save PC, but it will point into this
2089 frame's prologue, not the next frame's resume location. Also
2090 strip the saved T bit. A valid LR may have the low bit set, but
2091 a valid PC never does. */
2092 if (prev_regnum == ARM_PC_REGNUM)
2093 {
2094 CORE_ADDR lr;
2095
2096 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2097 return frame_unwind_got_constant (this_frame, prev_regnum,
2098 arm_addr_bits_remove (gdbarch, lr));
2099 }
2100
2101 /* SP is generally not saved to the stack, but this frame is
2102 identified by the next frame's stack pointer at the time of the call.
2103 The value was already reconstructed into PREV_SP. */
2104 if (prev_regnum == ARM_SP_REGNUM)
2105 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2106
2107 /* The CPSR may have been changed by the call instruction and by the
2108 called function. The only bit we can reconstruct is the T bit,
2109 by checking the low bit of LR as of the call. This is a reliable
2110 indicator of Thumb-ness except for some ARM v4T pre-interworking
2111 Thumb code, which could get away with a clear low bit as long as
2112 the called function did not use bx. Guess that all other
2113 bits are unchanged; the condition flags are presumably lost,
2114 but the processor status is likely valid. */
2115 if (prev_regnum == ARM_PS_REGNUM)
2116 {
2117 CORE_ADDR lr, cpsr;
2118 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2119
2120 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2121 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2122 if (IS_THUMB_ADDR (lr))
2123 cpsr |= t_bit;
2124 else
2125 cpsr &= ~t_bit;
2126 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2127 }
2128
2129 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2130 prev_regnum);
2131 }
2132
2133 struct frame_unwind arm_prologue_unwind = {
2134 NORMAL_FRAME,
2135 default_frame_unwind_stop_reason,
2136 arm_prologue_this_id,
2137 arm_prologue_prev_register,
2138 NULL,
2139 default_frame_sniffer
2140 };
2141
2142 /* Maintain a list of ARM exception table entries per objfile, similar to the
2143 list of mapping symbols. We only cache entries for standard ARM-defined
2144 personality routines; the cache will contain only the frame unwinding
2145 instructions associated with the entry (not the descriptors). */
2146
2147 static const struct objfile_data *arm_exidx_data_key;
2148
2149 struct arm_exidx_entry
2150 {
2151 bfd_vma addr;
2152 gdb_byte *entry;
2153 };
2154 typedef struct arm_exidx_entry arm_exidx_entry_s;
2155 DEF_VEC_O(arm_exidx_entry_s);
2156
2157 struct arm_exidx_data
2158 {
2159 VEC(arm_exidx_entry_s) **section_maps;
2160 };
2161
2162 static void
2163 arm_exidx_data_free (struct objfile *objfile, void *arg)
2164 {
2165 struct arm_exidx_data *data = arg;
2166 unsigned int i;
2167
2168 for (i = 0; i < objfile->obfd->section_count; i++)
2169 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2170 }
2171
2172 static inline int
2173 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2174 const struct arm_exidx_entry *rhs)
2175 {
2176 return lhs->addr < rhs->addr;
2177 }
2178
2179 static struct obj_section *
2180 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2181 {
2182 struct obj_section *osect;
2183
2184 ALL_OBJFILE_OSECTIONS (objfile, osect)
2185 if (bfd_get_section_flags (objfile->obfd,
2186 osect->the_bfd_section) & SEC_ALLOC)
2187 {
2188 bfd_vma start, size;
2189 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2190 size = bfd_get_section_size (osect->the_bfd_section);
2191
2192 if (start <= vma && vma < start + size)
2193 return osect;
2194 }
2195
2196 return NULL;
2197 }
2198
2199 /* Parse contents of exception table and exception index sections
2200 of OBJFILE, and fill in the exception table entry cache.
2201
2202 For each entry that refers to a standard ARM-defined personality
2203 routine, extract the frame unwinding instructions (from either
2204 the index or the table section). The unwinding instructions
2205 are normalized by:
2206 - extracting them from the rest of the table data
2207 - converting to host endianness
2208 - appending the implicit 0xb0 ("Finish") code
2209
2210 The extracted and normalized instructions are stored for later
2211 retrieval by the arm_find_exidx_entry routine. */
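/* Both the function-address word and the table-pointer word of an
   .ARM.exidx entry are 31-bit place-relative (R_ARM_PREL31) offsets,
   which the code below sign-extends using the
   ((x & 0x7fffffff) ^ 0x40000000) - 0x40000000 idiom.  For example, a
   stored value of 0x7ffffff8 has bit 30 set, so it decodes to -8; the
   target then lies eight bytes before the word holding the offset.  */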
2212
2213 static void
2214 arm_exidx_new_objfile (struct objfile *objfile)
2215 {
2216 struct cleanup *cleanups;
2217 struct arm_exidx_data *data;
2218 asection *exidx, *extab;
2219 bfd_vma exidx_vma = 0, extab_vma = 0;
2220 bfd_size_type exidx_size = 0, extab_size = 0;
2221 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2222 LONGEST i;
2223
2224 /* If we've already touched this file, do nothing. */
2225 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2226 return;
2227 cleanups = make_cleanup (null_cleanup, NULL);
2228
2229 /* Read contents of exception table and index. */
2230 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2231 if (exidx)
2232 {
2233 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2234 exidx_size = bfd_get_section_size (exidx);
2235 exidx_data = xmalloc (exidx_size);
2236 make_cleanup (xfree, exidx_data);
2237
2238 if (!bfd_get_section_contents (objfile->obfd, exidx,
2239 exidx_data, 0, exidx_size))
2240 {
2241 do_cleanups (cleanups);
2242 return;
2243 }
2244 }
2245
2246 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2247 if (extab)
2248 {
2249 extab_vma = bfd_section_vma (objfile->obfd, extab);
2250 extab_size = bfd_get_section_size (extab);
2251 extab_data = xmalloc (extab_size);
2252 make_cleanup (xfree, extab_data);
2253
2254 if (!bfd_get_section_contents (objfile->obfd, extab,
2255 extab_data, 0, extab_size))
2256 {
2257 do_cleanups (cleanups);
2258 return;
2259 }
2260 }
2261
2262 /* Allocate exception table data structure. */
2263 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2264 set_objfile_data (objfile, arm_exidx_data_key, data);
2265 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2266 objfile->obfd->section_count,
2267 VEC(arm_exidx_entry_s) *);
2268
2269 /* Fill in exception table. */
2270 for (i = 0; i < exidx_size / 8; i++)
2271 {
2272 struct arm_exidx_entry new_exidx_entry;
2273 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2274 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2275 bfd_vma addr = 0, word = 0;
2276 int n_bytes = 0, n_words = 0;
2277 struct obj_section *sec;
2278 gdb_byte *entry = NULL;
2279
2280 /* Extract address of start of function. */
2281 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2282 idx += exidx_vma + i * 8;
2283
2284 /* Find section containing function and compute section offset. */
2285 sec = arm_obj_section_from_vma (objfile, idx);
2286 if (sec == NULL)
2287 continue;
2288 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2289
2290 /* Determine address of exception table entry. */
2291 if (val == 1)
2292 {
2293 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2294 }
2295 else if ((val & 0xff000000) == 0x80000000)
2296 {
2297 /* Exception table entry embedded in .ARM.exidx
2298 -- must be short form. */
2299 word = val;
2300 n_bytes = 3;
2301 }
2302 else if (!(val & 0x80000000))
2303 {
2304 /* Exception table entry in .ARM.extab. */
2305 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2306 addr += exidx_vma + i * 8 + 4;
2307
2308 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2309 {
2310 word = bfd_h_get_32 (objfile->obfd,
2311 extab_data + addr - extab_vma);
2312 addr += 4;
2313
2314 if ((word & 0xff000000) == 0x80000000)
2315 {
2316 /* Short form. */
2317 n_bytes = 3;
2318 }
2319 else if ((word & 0xff000000) == 0x81000000
2320 || (word & 0xff000000) == 0x82000000)
2321 {
2322 /* Long form. */
2323 n_bytes = 2;
2324 n_words = ((word >> 16) & 0xff);
2325 }
2326 else if (!(word & 0x80000000))
2327 {
2328 bfd_vma pers;
2329 struct obj_section *pers_sec;
2330 int gnu_personality = 0;
2331
2332 /* Custom personality routine. */
2333 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2334 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2335
2336 /* Check whether we've got one of the variants of the
2337 GNU personality routines. */
2338 pers_sec = arm_obj_section_from_vma (objfile, pers);
2339 if (pers_sec)
2340 {
2341 static const char *personality[] =
2342 {
2343 "__gcc_personality_v0",
2344 "__gxx_personality_v0",
2345 "__gcj_personality_v0",
2346 "__gnu_objc_personality_v0",
2347 NULL
2348 };
2349
2350 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2351 int k;
2352
2353 for (k = 0; personality[k]; k++)
2354 if (lookup_minimal_symbol_by_pc_name
2355 (pc, personality[k], objfile))
2356 {
2357 gnu_personality = 1;
2358 break;
2359 }
2360 }
2361
2362 /* If so, the next word contains a word count in the high
2363 byte, followed by the same unwind instructions as the
2364 pre-defined forms. */
2365 if (gnu_personality
2366 && addr + 4 <= extab_vma + extab_size)
2367 {
2368 word = bfd_h_get_32 (objfile->obfd,
2369 extab_data + addr - extab_vma);
2370 addr += 4;
2371 n_bytes = 3;
2372 n_words = ((word >> 24) & 0xff);
2373 }
2374 }
2375 }
2376 }
2377
2378 /* Sanity check address. */
2379 if (n_words)
2380 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2381 n_words = n_bytes = 0;
2382
2383 /* The unwind instructions reside in WORD (only the N_BYTES least
2384 significant bytes are valid), followed by N_WORDS words in the
2385 extab section starting at ADDR. */
2386 if (n_bytes || n_words)
2387 {
2388 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2389 n_bytes + n_words * 4 + 1);
2390
2391 while (n_bytes--)
2392 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2393
2394 while (n_words--)
2395 {
2396 word = bfd_h_get_32 (objfile->obfd,
2397 extab_data + addr - extab_vma);
2398 addr += 4;
2399
2400 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2401 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2402 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2403 *p++ = (gdb_byte) (word & 0xff);
2404 }
2405
2406 /* Implied "Finish" to terminate the list. */
2407 *p++ = 0xb0;
2408 }
2409
2410 /* Push the entry onto the vector. Entries are guaranteed to
2411 always appear in order of increasing addresses. */
2412 new_exidx_entry.addr = idx;
2413 new_exidx_entry.entry = entry;
2414 VEC_safe_push (arm_exidx_entry_s,
2415 data->section_maps[sec->the_bfd_section->index],
2416 &new_exidx_entry);
2417 }
2418
2419 do_cleanups (cleanups);
2420 }
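/* For reference, a typical compact (short form) index entry emitted by
   GCC might look like this in a hypothetical object file:

       .ARM.exidx word pair:   <PREL31 offset of function>   0x80a8b0b0

   The second word has 0x80 in its top byte, so the unwind instructions
   are embedded in the index itself; the loop above extracts the three
   low bytes 0xa8 0xb0 0xb0 ("pop {r4, lr}", "finish", "finish") and
   appends the implicit trailing 0xb0, yielding the normalized byte
   sequence later decoded by arm_exidx_fill_cache.  */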
2421
2422 /* Search for the exception table entry covering MEMADDR. If one is found,
2423 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2424 set *START to the start of the region covered by this entry. */
2425
2426 static gdb_byte *
2427 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2428 {
2429 struct obj_section *sec;
2430
2431 sec = find_pc_section (memaddr);
2432 if (sec != NULL)
2433 {
2434 struct arm_exidx_data *data;
2435 VEC(arm_exidx_entry_s) *map;
2436 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2437 unsigned int idx;
2438
2439 data = objfile_data (sec->objfile, arm_exidx_data_key);
2440 if (data != NULL)
2441 {
2442 map = data->section_maps[sec->the_bfd_section->index];
2443 if (!VEC_empty (arm_exidx_entry_s, map))
2444 {
2445 struct arm_exidx_entry *map_sym;
2446
2447 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2448 arm_compare_exidx_entries);
2449
2450 /* VEC_lower_bound finds the earliest ordered insertion
2451 point. If the following symbol starts at this exact
2452 address, we use that; otherwise, the preceding
2453 exception table entry covers this address. */
2454 if (idx < VEC_length (arm_exidx_entry_s, map))
2455 {
2456 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2457 if (map_sym->addr == map_key.addr)
2458 {
2459 if (start)
2460 *start = map_sym->addr + obj_section_addr (sec);
2461 return map_sym->entry;
2462 }
2463 }
2464
2465 if (idx > 0)
2466 {
2467 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2468 if (start)
2469 *start = map_sym->addr + obj_section_addr (sec);
2470 return map_sym->entry;
2471 }
2472 }
2473 }
2474 }
2475
2476 return NULL;
2477 }
2478
2479 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2480 instruction list from the ARM exception table entry ENTRY, allocate and
2481 return a prologue cache structure describing how to unwind this frame.
2482
2483 Return NULL if the unwinding instruction list contains a "spare",
2484 "reserved" or "refuse to unwind" instruction as defined in section
2485 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2486 for the ARM Architecture" document. */
2487
2488 static struct arm_prologue_cache *
2489 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2490 {
2491 CORE_ADDR vsp = 0;
2492 int vsp_valid = 0;
2493
2494 struct arm_prologue_cache *cache;
2495 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2496 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2497
2498 for (;;)
2499 {
2500 gdb_byte insn;
2501
2502 /* Whenever we reload SP, we have to retrieve its actual
2503 value in the current frame. */
2504 if (!vsp_valid)
2505 {
2506 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2507 {
2508 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2509 vsp = get_frame_register_unsigned (this_frame, reg);
2510 }
2511 else
2512 {
2513 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2514 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2515 }
2516
2517 vsp_valid = 1;
2518 }
2519
2520 /* Decode next unwind instruction. */
2521 insn = *entry++;
2522
2523 if ((insn & 0xc0) == 0)
2524 {
2525 int offset = insn & 0x3f;
2526 vsp += (offset << 2) + 4;
2527 }
2528 else if ((insn & 0xc0) == 0x40)
2529 {
2530 int offset = insn & 0x3f;
2531 vsp -= (offset << 2) + 4;
2532 }
2533 else if ((insn & 0xf0) == 0x80)
2534 {
2535 int mask = ((insn & 0xf) << 8) | *entry++;
2536 int i;
2537
2538 /* The special case of an all-zero mask identifies
2539 "Refuse to unwind". We return NULL to fall back
2540 to the prologue analyzer. */
2541 if (mask == 0)
2542 return NULL;
2543
2544 /* Pop registers r4..r15 under mask. */
2545 for (i = 0; i < 12; i++)
2546 if (mask & (1 << i))
2547 {
2548 cache->saved_regs[4 + i].addr = vsp;
2549 vsp += 4;
2550 }
2551
2552 /* Special-case popping SP -- we need to reload vsp. */
2553 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2554 vsp_valid = 0;
2555 }
2556 else if ((insn & 0xf0) == 0x90)
2557 {
2558 int reg = insn & 0xf;
2559
2560 /* Reserved cases. */
2561 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2562 return NULL;
2563
2564 /* Set SP from another register and mark VSP for reload. */
2565 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2566 vsp_valid = 0;
2567 }
2568 else if ((insn & 0xf0) == 0xa0)
2569 {
2570 int count = insn & 0x7;
2571 int pop_lr = (insn & 0x8) != 0;
2572 int i;
2573
2574 /* Pop r4..r[4+count]. */
2575 for (i = 0; i <= count; i++)
2576 {
2577 cache->saved_regs[4 + i].addr = vsp;
2578 vsp += 4;
2579 }
2580
2581 /* If indicated by flag, pop LR as well. */
2582 if (pop_lr)
2583 {
2584 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2585 vsp += 4;
2586 }
2587 }
2588 else if (insn == 0xb0)
2589 {
2590 /* We could only have updated PC by popping into it; if so, it
2591 will show up as an address. Otherwise, copy LR into PC. */
2592 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2593 cache->saved_regs[ARM_PC_REGNUM]
2594 = cache->saved_regs[ARM_LR_REGNUM];
2595
2596 /* We're done. */
2597 break;
2598 }
2599 else if (insn == 0xb1)
2600 {
2601 int mask = *entry++;
2602 int i;
2603
2604 /* An all-zero mask or a mask >= 16 is "spare". */
2605 if (mask == 0 || mask >= 16)
2606 return NULL;
2607
2608 /* Pop r0..r3 under mask. */
2609 for (i = 0; i < 4; i++)
2610 if (mask & (1 << i))
2611 {
2612 cache->saved_regs[i].addr = vsp;
2613 vsp += 4;
2614 }
2615 }
2616 else if (insn == 0xb2)
2617 {
2618 ULONGEST offset = 0;
2619 unsigned shift = 0;
2620
2621 do
2622 {
2623 offset |= (*entry & 0x7f) << shift;
2624 shift += 7;
2625 }
2626 while (*entry++ & 0x80);
2627
2628 vsp += 0x204 + (offset << 2);
2629 }
2630 else if (insn == 0xb3)
2631 {
2632 int start = *entry >> 4;
2633 int count = (*entry++) & 0xf;
2634 int i;
2635
2636 /* Only registers D0..D15 are valid here. */
2637 if (start + count >= 16)
2638 return NULL;
2639
2640 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2641 for (i = 0; i <= count; i++)
2642 {
2643 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2644 vsp += 8;
2645 }
2646
2647 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2648 vsp += 4;
2649 }
2650 else if ((insn & 0xf8) == 0xb8)
2651 {
2652 int count = insn & 0x7;
2653 int i;
2654
2655 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2656 for (i = 0; i <= count; i++)
2657 {
2658 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2659 vsp += 8;
2660 }
2661
2662 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2663 vsp += 4;
2664 }
2665 else if (insn == 0xc6)
2666 {
2667 int start = *entry >> 4;
2668 int count = (*entry++) & 0xf;
2669 int i;
2670
2671 /* Only registers WR0..WR15 are valid. */
2672 if (start + count >= 16)
2673 return NULL;
2674
2675 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2676 for (i = 0; i <= count; i++)
2677 {
2678 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2679 vsp += 8;
2680 }
2681 }
2682 else if (insn == 0xc7)
2683 {
2684 int mask = *entry++;
2685 int i;
2686
2687 /* An all-zero mask or a mask >= 16 is "spare". */
2688 if (mask == 0 || mask >= 16)
2689 return NULL;
2690
2691 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2692 for (i = 0; i < 4; i++)
2693 if (mask & (1 << i))
2694 {
2695 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2696 vsp += 4;
2697 }
2698 }
2699 else if ((insn & 0xf8) == 0xc0)
2700 {
2701 int count = insn & 0x7;
2702 int i;
2703
2704 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2705 for (i = 0; i <= count; i++)
2706 {
2707 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2708 vsp += 8;
2709 }
2710 }
2711 else if (insn == 0xc8)
2712 {
2713 int start = *entry >> 4;
2714 int count = (*entry++) & 0xf;
2715 int i;
2716
2717 /* Only registers D16..D31 are valid here. */
2718 if (start + count >= 16)
2719 return NULL;
2720
2721 /* Pop VFP double-precision registers
2722 D[16+start]..D[16+start+count]. */
2723 for (i = 0; i <= count; i++)
2724 {
2725 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2726 vsp += 8;
2727 }
2728 }
2729 else if (insn == 0xc9)
2730 {
2731 int start = *entry >> 4;
2732 int count = (*entry++) & 0xf;
2733 int i;
2734
2735 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2736 for (i = 0; i <= count; i++)
2737 {
2738 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2739 vsp += 8;
2740 }
2741 }
2742 else if ((insn & 0xf8) == 0xd0)
2743 {
2744 int count = insn & 0x7;
2745 int i;
2746
2747 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2748 for (i = 0; i <= count; i++)
2749 {
2750 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2751 vsp += 8;
2752 }
2753 }
2754 else
2755 {
2756 /* Everything else is "spare". */
2757 return NULL;
2758 }
2759 }
2760
2761 /* If we restore SP from a register, assume this was the frame register.
2762 Otherwise just fall back to SP as frame register. */
2763 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2764 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2765 else
2766 cache->framereg = ARM_SP_REGNUM;
2767
2768 /* Determine offset to previous frame. */
2769 cache->framesize
2770 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2771
2772 /* We already got the previous SP. */
2773 cache->prev_sp = vsp;
2774
2775 return cache;
2776 }
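/* To make the decoder above concrete, consider the leading bytes
   0xa8 0xb0 of the short-form example given earlier, applied to a frame
   whose SP currently reads 0x7fff0000 (an arbitrary value chosen for
   illustration):

       0xa8  pop {r4, lr}  ->  saved_regs[r4].addr = 0x7fff0000
                               saved_regs[lr].addr = 0x7fff0004
                               vsp = 0x7fff0008
       0xb0  finish        ->  PC was not popped, so the saved PC is
                               taken from the saved LR; the loop exits.

   The resulting cache has framereg == ARM_SP_REGNUM, framesize == 8 and
   prev_sp == 0x7fff0008.  */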
2777
2778 /* Unwinding via ARM exception table entries. Note that the sniffer
2779 already computes a filled-in prologue cache, which is then used
2780 with the same arm_prologue_this_id and arm_prologue_prev_register
2781 routines also used for prologue-parsing based unwinding. */
2782
2783 static int
2784 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2785 struct frame_info *this_frame,
2786 void **this_prologue_cache)
2787 {
2788 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2789 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2790 CORE_ADDR addr_in_block, exidx_region, func_start;
2791 struct arm_prologue_cache *cache;
2792 gdb_byte *entry;
2793
2794 /* See if we have an ARM exception table entry covering this address. */
2795 addr_in_block = get_frame_address_in_block (this_frame);
2796 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2797 if (!entry)
2798 return 0;
2799
2800 /* The ARM exception table does not describe unwind information
2801 for arbitrary PC values, but is guaranteed to be correct only
2802 at call sites. We have to decide here whether we want to use
2803 ARM exception table information for this frame, or fall back
2804 to using prologue parsing. (Note that if we have DWARF CFI,
2805 this sniffer isn't even called -- CFI is always preferred.)
2806
2807 Before we make this decision, however, we check whether we
2808 actually have *symbol* information for the current frame.
2809 If not, prologue parsing would not work anyway, so we might
2810 as well use the exception table and hope for the best. */
2811 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2812 {
2813 int exc_valid = 0;
2814
2815 /* If the next frame is "normal", we are at a call site in this
2816 frame, so exception information is guaranteed to be valid. */
2817 if (get_next_frame (this_frame)
2818 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2819 exc_valid = 1;
2820
2821 /* We also assume exception information is valid if we're currently
2822 blocked in a system call. The system library is supposed to
2823 ensure this, so that e.g. pthread cancellation works. */
2824 if (arm_frame_is_thumb (this_frame))
2825 {
2826 LONGEST insn;
2827
2828 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2829 byte_order_for_code, &insn)
2830 && (insn & 0xff00) == 0xdf00 /* svc */)
2831 exc_valid = 1;
2832 }
2833 else
2834 {
2835 LONGEST insn;
2836
2837 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2838 byte_order_for_code, &insn)
2839 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2840 exc_valid = 1;
2841 }
2842
2843 /* Bail out if we don't know that exception information is valid. */
2844 if (!exc_valid)
2845 return 0;
2846
2847 /* The ARM exception index does not mark the *end* of the region
2848 covered by the entry, and some functions will not have any entry.
2849 To correctly recognize the end of the covered region, the linker
2850 should have inserted dummy records with a CANTUNWIND marker.
2851
2852 Unfortunately, current versions of GNU ld do not reliably do
2853 this, and thus we may have found an incorrect entry above.
2854 As a (temporary) sanity check, we only use the entry if it
2855 lies *within* the bounds of the function. Note that this check
2856 might reject perfectly valid entries that just happen to cover
2857 multiple functions; therefore this check ought to be removed
2858 once the linker is fixed. */
2859 if (func_start > exidx_region)
2860 return 0;
2861 }
2862
2863 /* Decode the list of unwinding instructions into a prologue cache.
2864 Note that this may fail due to e.g. a "refuse to unwind" code. */
2865 cache = arm_exidx_fill_cache (this_frame, entry);
2866 if (!cache)
2867 return 0;
2868
2869 *this_prologue_cache = cache;
2870 return 1;
2871 }
2872
2873 struct frame_unwind arm_exidx_unwind = {
2874 NORMAL_FRAME,
2875 default_frame_unwind_stop_reason,
2876 arm_prologue_this_id,
2877 arm_prologue_prev_register,
2878 NULL,
2879 arm_exidx_unwind_sniffer
2880 };
2881
2882 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2883 trampoline, return the target PC. Otherwise return 0.
2884
2885 void call0a (char c, short s, int i, long l) {}
2886
2887 int main (void)
2888 {
2889 (*pointer_to_call0a) (c, s, i, l);
2890 }
2891
2892 Instead of calling a stub library function _call_via_xx (xx is
2893 the register name), GCC may inline the trampoline in the object
2894 file as below (register r2 has the address of call0a).
2895
2896 .global main
2897 .type main, %function
2898 ...
2899 bl .L1
2900 ...
2901 .size main, .-main
2902
2903 .L1:
2904 bx r2
2905
2906 The trampoline 'bx r2' doesn't belong to main. */
2907
2908 static CORE_ADDR
2909 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2910 {
2911 /* The heuristic for recognizing such a trampoline is that FRAME is
2912 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2913 if (arm_frame_is_thumb (frame))
2914 {
2915 gdb_byte buf[2];
2916
2917 if (target_read_memory (pc, buf, 2) == 0)
2918 {
2919 struct gdbarch *gdbarch = get_frame_arch (frame);
2920 enum bfd_endian byte_order_for_code
2921 = gdbarch_byte_order_for_code (gdbarch);
2922 uint16_t insn
2923 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2924
2925 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2926 {
2927 CORE_ADDR dest
2928 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2929
2930 /* Clear the LSB so that gdb core sets step-resume
2931 breakpoint at the right address. */
2932 return UNMAKE_THUMB_ADDR (dest);
2933 }
2934 }
2935 }
2936
2937 return 0;
2938 }
2939
2940 static struct arm_prologue_cache *
2941 arm_make_stub_cache (struct frame_info *this_frame)
2942 {
2943 struct arm_prologue_cache *cache;
2944
2945 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2946 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2947
2948 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2949
2950 return cache;
2951 }
2952
2953 /* Our frame ID for a stub frame is the current SP and the PC. */
2954
2955 static void
2956 arm_stub_this_id (struct frame_info *this_frame,
2957 void **this_cache,
2958 struct frame_id *this_id)
2959 {
2960 struct arm_prologue_cache *cache;
2961
2962 if (*this_cache == NULL)
2963 *this_cache = arm_make_stub_cache (this_frame);
2964 cache = *this_cache;
2965
2966 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2967 }
2968
2969 static int
2970 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2971 struct frame_info *this_frame,
2972 void **this_prologue_cache)
2973 {
2974 CORE_ADDR addr_in_block;
2975 gdb_byte dummy[4];
2976 CORE_ADDR pc, start_addr;
2977 const char *name;
2978
2979 addr_in_block = get_frame_address_in_block (this_frame);
2980 pc = get_frame_pc (this_frame);
2981 if (in_plt_section (addr_in_block)
2982 /* We also use the stub unwinder if the target memory is unreadable,
2983 to avoid having the prologue unwinder try to read it. */
2984 || target_read_memory (pc, dummy, 4) != 0)
2985 return 1;
2986
2987 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2988 && arm_skip_bx_reg (this_frame, pc) != 0)
2989 return 1;
2990
2991 return 0;
2992 }
2993
2994 struct frame_unwind arm_stub_unwind = {
2995 NORMAL_FRAME,
2996 default_frame_unwind_stop_reason,
2997 arm_stub_this_id,
2998 arm_prologue_prev_register,
2999 NULL,
3000 arm_stub_unwind_sniffer
3001 };
3002
3003 /* Store, into CACHE->saved_regs, the addresses of the saved
3004 registers of the frame described by THIS_FRAME, and return the
3005 populated CACHE. */
3006
3007 static struct arm_prologue_cache *
3008 arm_m_exception_cache (struct frame_info *this_frame)
3009 {
3010 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3011 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3012 struct arm_prologue_cache *cache;
3013 CORE_ADDR unwound_sp;
3014 LONGEST xpsr;
3015
3016 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3017 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3018
3019 unwound_sp = get_frame_register_unsigned (this_frame,
3020 ARM_SP_REGNUM);
3021
3022 /* The hardware saves eight 32-bit words, comprising xPSR,
3023 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3024 "B1.5.6 Exception entry behavior" in
3025 "ARMv7-M Architecture Reference Manual". */
3026 cache->saved_regs[0].addr = unwound_sp;
3027 cache->saved_regs[1].addr = unwound_sp + 4;
3028 cache->saved_regs[2].addr = unwound_sp + 8;
3029 cache->saved_regs[3].addr = unwound_sp + 12;
3030 cache->saved_regs[12].addr = unwound_sp + 16;
3031 cache->saved_regs[14].addr = unwound_sp + 20;
3032 cache->saved_regs[15].addr = unwound_sp + 24;
3033 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3034
3035 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3036 aligner between the top of the 32-byte stack frame and the
3037 previous context's stack pointer. */
3038 cache->prev_sp = unwound_sp + 32;
3039 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3040 && (xpsr & (1 << 9)) != 0)
3041 cache->prev_sp += 4;
3042
3043 return cache;
3044 }
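/* For illustration, if the exception frame was pushed with
   UNWOUND_SP == 0x20001000 (an arbitrary example address), the
   hardware-saved context decoded above is laid out as:

       0x20001000  R0        0x20001010  R12
       0x20001004  R1        0x20001014  LR (R14)
       0x20001008  R2        0x20001018  ReturnAddress (PC)
       0x2000100c  R3        0x2000101c  xPSR

   and PREV_SP is 0x20001020, or 0x20001024 if bit 9 of the stacked xPSR
   indicates the extra alignment word.  */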
3045
3046 /* Implementation of function hook 'this_id' in
3047 'struct frame_unwind'. */
3048
3049 static void
3050 arm_m_exception_this_id (struct frame_info *this_frame,
3051 void **this_cache,
3052 struct frame_id *this_id)
3053 {
3054 struct arm_prologue_cache *cache;
3055
3056 if (*this_cache == NULL)
3057 *this_cache = arm_m_exception_cache (this_frame);
3058 cache = *this_cache;
3059
3060 /* Our frame ID for an exception frame is the previous SP and the PC. */
3061 *this_id = frame_id_build (cache->prev_sp,
3062 get_frame_pc (this_frame));
3063 }
3064
3065 /* Implementation of function hook 'prev_register' in
3066 'struct frame_unwind'. */
3067
3068 static struct value *
3069 arm_m_exception_prev_register (struct frame_info *this_frame,
3070 void **this_cache,
3071 int prev_regnum)
3072 {
3073 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3074 struct arm_prologue_cache *cache;
3075
3076 if (*this_cache == NULL)
3077 *this_cache = arm_m_exception_cache (this_frame);
3078 cache = *this_cache;
3079
3080 /* The value was already reconstructed into PREV_SP. */
3081 if (prev_regnum == ARM_SP_REGNUM)
3082 return frame_unwind_got_constant (this_frame, prev_regnum,
3083 cache->prev_sp);
3084
3085 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3086 prev_regnum);
3087 }
3088
3089 /* Implementation of function hook 'sniffer' in
3090 'struct frame_unwind'. */
3091
3092 static int
3093 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3094 struct frame_info *this_frame,
3095 void **this_prologue_cache)
3096 {
3097 CORE_ADDR this_pc = get_frame_pc (this_frame);
3098
3099 /* No need to check is_m; this sniffer is only registered for
3100 M-profile architectures. */
3101
3102 /* Exception frames return to one of these magic PCs. Other values
3103 are not defined as of v7-M. See details in "B1.5.8 Exception
3104 return behavior" in "ARMv7-M Architecture Reference Manual". */
3105 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3106 || this_pc == 0xfffffffd)
3107 return 1;
3108
3109 return 0;
3110 }
3111
3112 /* Frame unwinder for M-profile exceptions. */
3113
3114 struct frame_unwind arm_m_exception_unwind =
3115 {
3116 SIGTRAMP_FRAME,
3117 default_frame_unwind_stop_reason,
3118 arm_m_exception_this_id,
3119 arm_m_exception_prev_register,
3120 NULL,
3121 arm_m_exception_unwind_sniffer
3122 };
3123
3124 static CORE_ADDR
3125 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3126 {
3127 struct arm_prologue_cache *cache;
3128
3129 if (*this_cache == NULL)
3130 *this_cache = arm_make_prologue_cache (this_frame);
3131 cache = *this_cache;
3132
3133 return cache->prev_sp - cache->framesize;
3134 }
3135
3136 struct frame_base arm_normal_base = {
3137 &arm_prologue_unwind,
3138 arm_normal_frame_base,
3139 arm_normal_frame_base,
3140 arm_normal_frame_base
3141 };
3142
3143 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3144 dummy frame. The frame ID's base needs to match the TOS value
3145 saved by save_dummy_frame_tos() and returned from
3146 arm_push_dummy_call, and the PC needs to match the dummy frame's
3147 breakpoint. */
3148
3149 static struct frame_id
3150 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3151 {
3152 return frame_id_build (get_frame_register_unsigned (this_frame,
3153 ARM_SP_REGNUM),
3154 get_frame_pc (this_frame));
3155 }
3156
3157 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3158 be used to construct the previous frame's ID, after looking up the
3159 containing function). */
3160
3161 static CORE_ADDR
3162 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3163 {
3164 CORE_ADDR pc;
3165 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3166 return arm_addr_bits_remove (gdbarch, pc);
3167 }
3168
3169 static CORE_ADDR
3170 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3171 {
3172 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3173 }
3174
3175 static struct value *
3176 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3177 int regnum)
3178 {
3179 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3180 CORE_ADDR lr, cpsr;
3181 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3182
3183 switch (regnum)
3184 {
3185 case ARM_PC_REGNUM:
3186 /* The PC is normally copied from the return column, which
3187 describes saves of LR. However, that version may have an
3188 extra bit set to indicate Thumb state. The bit is not
3189 part of the PC. */
3190 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3191 return frame_unwind_got_constant (this_frame, regnum,
3192 arm_addr_bits_remove (gdbarch, lr));
3193
3194 case ARM_PS_REGNUM:
3195 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3196 cpsr = get_frame_register_unsigned (this_frame, regnum);
3197 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3198 if (IS_THUMB_ADDR (lr))
3199 cpsr |= t_bit;
3200 else
3201 cpsr &= ~t_bit;
3202 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3203
3204 default:
3205 internal_error (__FILE__, __LINE__,
3206 _("Unexpected register %d"), regnum);
3207 }
3208 }
3209
3210 static void
3211 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3212 struct dwarf2_frame_state_reg *reg,
3213 struct frame_info *this_frame)
3214 {
3215 switch (regnum)
3216 {
3217 case ARM_PC_REGNUM:
3218 case ARM_PS_REGNUM:
3219 reg->how = DWARF2_FRAME_REG_FN;
3220 reg->loc.fn = arm_dwarf2_prev_register;
3221 break;
3222 case ARM_SP_REGNUM:
3223 reg->how = DWARF2_FRAME_REG_CFA;
3224 break;
3225 }
3226 }
3227
3228 /* Return true if we are in the function's epilogue, i.e. after the
3229 instruction that destroyed the function's stack frame. */
3230
3231 static int
3232 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3233 {
3234 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3235 unsigned int insn, insn2;
3236 int found_return = 0, found_stack_adjust = 0;
3237 CORE_ADDR func_start, func_end;
3238 CORE_ADDR scan_pc;
3239 gdb_byte buf[4];
3240
3241 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3242 return 0;
3243
3244 /* The epilogue is a sequence of instructions along the following lines:
3245
3246 - add stack frame size to SP or FP
3247 - [if frame pointer used] restore SP from FP
3248 - restore registers from SP [may include PC]
3249 - a return-type instruction [if PC wasn't already restored]
3250
3251 In a first pass, we scan forward from the current PC and verify the
3252 instructions we find as compatible with this sequence, ending in a
3253 return instruction.
3254
3255 However, this is not sufficient to distinguish indirect function calls
3256 within a function from indirect tail calls in the epilogue in some cases.
3257 Therefore, if we didn't already find any SP-changing instruction during
3258 forward scan, we add a backward scanning heuristic to ensure we actually
3259 are in the epilogue. */
3260
3261 scan_pc = pc;
3262 while (scan_pc < func_end && !found_return)
3263 {
3264 if (target_read_memory (scan_pc, buf, 2))
3265 break;
3266
3267 scan_pc += 2;
3268 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3269
3270 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3271 found_return = 1;
3272 else if (insn == 0x46f7) /* mov pc, lr */
3273 found_return = 1;
3274 else if (thumb_instruction_restores_sp (insn))
3275 {
3276 found_stack_adjust = 1;
3277 if ((insn & 0xfe00) == 0xbd00) /* pop <registers, PC> */
3278 found_return = 1;
3279 }
3280 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3281 {
3282 if (target_read_memory (scan_pc, buf, 2))
3283 break;
3284
3285 scan_pc += 2;
3286 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3287
3288 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3289 {
3290 found_stack_adjust = 1;
3291 if (insn2 & 0x8000) /* <registers> include PC. */
3292 found_return = 1;
3293 }
3294 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3295 && (insn2 & 0x0fff) == 0x0b04)
3296 {
3297 found_stack_adjust = 1;
3298 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3299 found_return = 1;
3300 }
3301 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3302 && (insn2 & 0x0e00) == 0x0a00)
3303 found_stack_adjust = 1;
3304 else
3305 break;
3306 }
3307 else
3308 break;
3309 }
3310
3311 if (!found_return)
3312 return 0;
3313
3314 /* Since any instruction in the epilogue sequence, with the possible
3315 exception of return itself, updates the stack pointer, we need to
3316 scan backwards for at most one instruction. Try either a 16-bit or
3317 a 32-bit instruction. This is just a heuristic, so we do not worry
3318 too much about false positives. */
3319
3320 if (!found_stack_adjust)
3321 {
3322 if (pc - 4 < func_start)
3323 return 0;
3324 if (target_read_memory (pc - 4, buf, 4))
3325 return 0;
3326
3327 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3328 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3329
3330 if (thumb_instruction_restores_sp (insn2))
3331 found_stack_adjust = 1;
3332 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3333 found_stack_adjust = 1;
3334 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3335 && (insn2 & 0x0fff) == 0x0b04)
3336 found_stack_adjust = 1;
3337 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3338 && (insn2 & 0x0e00) == 0x0a00)
3339 found_stack_adjust = 1;
3340 }
3341
3342 return found_stack_adjust;
3343 }
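/* A typical Thumb epilogue accepted by the forward scan above looks
   something like this (exact encodings vary with the compiler):

       b004        add sp, #16       ; stack adjustment
       bd10        pop {r4, pc}      ; restores registers and returns

   The "pop" both adjusts SP and writes PC, so it satisfies the
   stack-adjust and return tests in a single instruction.  */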
3344
3345 /* Return true if we are in the function's epilogue, i.e. after the
3346 instruction that destroyed the function's stack frame. */
3347
3348 static int
3349 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3350 {
3351 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3352 unsigned int insn;
3353 int found_return, found_stack_adjust;
3354 CORE_ADDR func_start, func_end;
3355
3356 if (arm_pc_is_thumb (gdbarch, pc))
3357 return thumb_in_function_epilogue_p (gdbarch, pc);
3358
3359 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3360 return 0;
3361
3362 /* We are in the epilogue if the previous instruction was a stack
3363 adjustment and the next instruction is a possible return (bx, mov
3364 pc, or pop). We could have to scan backwards to find the stack
3365 adjustment, or forwards to find the return, but this is a decent
3366 approximation. First scan forwards. */
3367
3368 found_return = 0;
3369 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3370 if (bits (insn, 28, 31) != INST_NV)
3371 {
3372 if ((insn & 0x0ffffff0) == 0x012fff10)
3373 /* BX. */
3374 found_return = 1;
3375 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3376 /* MOV PC. */
3377 found_return = 1;
3378 else if ((insn & 0x0fff0000) == 0x08bd0000
3379 && (insn & 0x0000c000) != 0)
3380 /* POP (LDMIA), including PC or LR. */
3381 found_return = 1;
3382 }
3383
3384 if (!found_return)
3385 return 0;
3386
3387 /* Scan backwards. This is just a heuristic, so do not worry about
3388 false positives from mode changes. */
3389
3390 if (pc < func_start + 4)
3391 return 0;
3392
3393 found_stack_adjust = 0;
3394 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3395 if (bits (insn, 28, 31) != INST_NV)
3396 {
3397 if ((insn & 0x0df0f000) == 0x0080d000)
3398 /* ADD SP (register or immediate). */
3399 found_stack_adjust = 1;
3400 else if ((insn & 0x0df0f000) == 0x0040d000)
3401 /* SUB SP (register or immediate). */
3402 found_stack_adjust = 1;
3403 else if ((insn & 0x0ffffff0) == 0x01a0d000)
3404 /* MOV SP. */
3405 found_stack_adjust = 1;
3406 else if ((insn & 0x0fff0000) == 0x08bd0000)
3407 /* POP (LDMIA). */
3408 found_stack_adjust = 1;
3409 else if ((insn & 0x0fff0000) == 0x049d0000)
3410 /* POP of a single register. */
3411 found_stack_adjust = 1;
3412 }
3413
3414 if (found_stack_adjust)
3415 return 1;
3416
3417 return 0;
3418 }
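/* The ARM-mode equivalent: given an epilogue such as

       e28dd018    add sp, sp, #24
       e8bd8ff0    pop {r4-r11, pc}   ; LDMIA sp!, {...}

   a PC pointing at the "pop" passes both tests above -- the preceding
   instruction adjusts SP and the LDMIA includes PC -- so we report that
   we are in the epilogue.  This is only a sketch; real compiler output
   varies.  */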
3419
3420
3421 /* When arguments must be pushed onto the stack, they go on in reverse
3422 order. The code below implements a FILO (stack) to do this. */
3423
3424 struct stack_item
3425 {
3426 int len;
3427 struct stack_item *prev;
3428 void *data;
3429 };
3430
3431 static struct stack_item *
3432 push_stack_item (struct stack_item *prev, const void *contents, int len)
3433 {
3434 struct stack_item *si;
3435 si = xmalloc (sizeof (struct stack_item));
3436 si->data = xmalloc (len);
3437 si->len = len;
3438 si->prev = prev;
3439 memcpy (si->data, contents, len);
3440 return si;
3441 }
3442
3443 static struct stack_item *
3444 pop_stack_item (struct stack_item *si)
3445 {
3446 struct stack_item *dead = si;
3447 si = si->prev;
3448 xfree (dead->data);
3449 xfree (dead);
3450 return si;
3451 }
3452
3453
3454 /* Return the alignment (in bytes) of the given type. */
3455
3456 static int
3457 arm_type_align (struct type *t)
3458 {
3459 int n;
3460 int align;
3461 int falign;
3462
3463 t = check_typedef (t);
3464 switch (TYPE_CODE (t))
3465 {
3466 default:
3467 /* Should never happen. */
3468 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3469 return 4;
3470
3471 case TYPE_CODE_PTR:
3472 case TYPE_CODE_ENUM:
3473 case TYPE_CODE_INT:
3474 case TYPE_CODE_FLT:
3475 case TYPE_CODE_SET:
3476 case TYPE_CODE_RANGE:
3477 case TYPE_CODE_REF:
3478 case TYPE_CODE_CHAR:
3479 case TYPE_CODE_BOOL:
3480 return TYPE_LENGTH (t);
3481
3482 case TYPE_CODE_ARRAY:
3483 case TYPE_CODE_COMPLEX:
3484 /* TODO: What about vector types? */
3485 return arm_type_align (TYPE_TARGET_TYPE (t));
3486
3487 case TYPE_CODE_STRUCT:
3488 case TYPE_CODE_UNION:
3489 align = 1;
3490 for (n = 0; n < TYPE_NFIELDS (t); n++)
3491 {
3492 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3493 if (falign > align)
3494 align = falign;
3495 }
3496 return align;
3497 }
3498 }
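/* For example, under these rules a structure such as

       struct mixed { char c; double d; };

   takes the alignment of its most strictly aligned member, here the
   8-byte double, while arrays and complex types inherit the alignment
   of their element type.  (This assumes the usual AAPCS scalar sizes;
   the actual values come from TYPE_LENGTH.)  */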
3499
3500 /* Possible base types for a candidate for passing and returning in
3501 VFP registers. */
3502
3503 enum arm_vfp_cprc_base_type
3504 {
3505 VFP_CPRC_UNKNOWN,
3506 VFP_CPRC_SINGLE,
3507 VFP_CPRC_DOUBLE,
3508 VFP_CPRC_VEC64,
3509 VFP_CPRC_VEC128
3510 };
3511
3512 /* The length of one element of base type B. */
3513
3514 static unsigned
3515 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3516 {
3517 switch (b)
3518 {
3519 case VFP_CPRC_SINGLE:
3520 return 4;
3521 case VFP_CPRC_DOUBLE:
3522 return 8;
3523 case VFP_CPRC_VEC64:
3524 return 8;
3525 case VFP_CPRC_VEC128:
3526 return 16;
3527 default:
3528 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3529 (int) b);
3530 }
3531 }
3532
3533 /* The character ('s', 'd' or 'q') for the type of VFP register used
3534 for passing base type B. */
3535
3536 static int
3537 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3538 {
3539 switch (b)
3540 {
3541 case VFP_CPRC_SINGLE:
3542 return 's';
3543 case VFP_CPRC_DOUBLE:
3544 return 'd';
3545 case VFP_CPRC_VEC64:
3546 return 'd';
3547 case VFP_CPRC_VEC128:
3548 return 'q';
3549 default:
3550 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3551 (int) b);
3552 }
3553 }
3554
3555 /* Determine whether T may be part of a candidate for passing and
3556 returning in VFP registers, ignoring the limit on the total number
3557 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3558 classification of the first valid component found; if it is not
3559 VFP_CPRC_UNKNOWN, all components must have the same classification
3560 as *BASE_TYPE. If it is found that T contains a type not permitted
3561 for passing and returning in VFP registers, a type differently
3562 classified from *BASE_TYPE, or two types differently classified
3563 from each other, return -1, otherwise return the total number of
3564 base-type elements found (possibly 0 in an empty structure or
3565 array). Vector types are not currently supported, matching the
3566 generic AAPCS support. */
3567
3568 static int
3569 arm_vfp_cprc_sub_candidate (struct type *t,
3570 enum arm_vfp_cprc_base_type *base_type)
3571 {
3572 t = check_typedef (t);
3573 switch (TYPE_CODE (t))
3574 {
3575 case TYPE_CODE_FLT:
3576 switch (TYPE_LENGTH (t))
3577 {
3578 case 4:
3579 if (*base_type == VFP_CPRC_UNKNOWN)
3580 *base_type = VFP_CPRC_SINGLE;
3581 else if (*base_type != VFP_CPRC_SINGLE)
3582 return -1;
3583 return 1;
3584
3585 case 8:
3586 if (*base_type == VFP_CPRC_UNKNOWN)
3587 *base_type = VFP_CPRC_DOUBLE;
3588 else if (*base_type != VFP_CPRC_DOUBLE)
3589 return -1;
3590 return 1;
3591
3592 default:
3593 return -1;
3594 }
3595 break;
3596
3597 case TYPE_CODE_COMPLEX:
3598 /* Arguments of complex T where T is one of the types float or
3599 double get treated as if they are implemented as:
3600
3601 struct complexT
3602 {
3603 T real;
3604 T imag;
3605 };
3606
3607 */
3608 switch (TYPE_LENGTH (t))
3609 {
3610 case 8:
3611 if (*base_type == VFP_CPRC_UNKNOWN)
3612 *base_type = VFP_CPRC_SINGLE;
3613 else if (*base_type != VFP_CPRC_SINGLE)
3614 return -1;
3615 return 2;
3616
3617 case 16:
3618 if (*base_type == VFP_CPRC_UNKNOWN)
3619 *base_type = VFP_CPRC_DOUBLE;
3620 else if (*base_type != VFP_CPRC_DOUBLE)
3621 return -1;
3622 return 2;
3623
3624 default:
3625 return -1;
3626 }
3627 break;
3628
3629 case TYPE_CODE_ARRAY:
3630 {
3631 int count;
3632 unsigned unitlen;
3633 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3634 if (count == -1)
3635 return -1;
3636 if (TYPE_LENGTH (t) == 0)
3637 {
3638 gdb_assert (count == 0);
3639 return 0;
3640 }
3641 else if (count == 0)
3642 return -1;
3643 unitlen = arm_vfp_cprc_unit_length (*base_type);
3644 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3645 return TYPE_LENGTH (t) / unitlen;
3646 }
3647 break;
3648
3649 case TYPE_CODE_STRUCT:
3650 {
3651 int count = 0;
3652 unsigned unitlen;
3653 int i;
3654 for (i = 0; i < TYPE_NFIELDS (t); i++)
3655 {
3656 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3657 base_type);
3658 if (sub_count == -1)
3659 return -1;
3660 count += sub_count;
3661 }
3662 if (TYPE_LENGTH (t) == 0)
3663 {
3664 gdb_assert (count == 0);
3665 return 0;
3666 }
3667 else if (count == 0)
3668 return -1;
3669 unitlen = arm_vfp_cprc_unit_length (*base_type);
3670 if (TYPE_LENGTH (t) != unitlen * count)
3671 return -1;
3672 return count;
3673 }
3674
3675 case TYPE_CODE_UNION:
3676 {
3677 int count = 0;
3678 unsigned unitlen;
3679 int i;
3680 for (i = 0; i < TYPE_NFIELDS (t); i++)
3681 {
3682 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3683 base_type);
3684 if (sub_count == -1)
3685 return -1;
3686 count = (count > sub_count ? count : sub_count);
3687 }
3688 if (TYPE_LENGTH (t) == 0)
3689 {
3690 gdb_assert (count == 0);
3691 return 0;
3692 }
3693 else if (count == 0)
3694 return -1;
3695 unitlen = arm_vfp_cprc_unit_length (*base_type);
3696 if (TYPE_LENGTH (t) != unitlen * count)
3697 return -1;
3698 return count;
3699 }
3700
3701 default:
3702 break;
3703 }
3704
3705 return -1;
3706 }
3707
3708 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3709 if passed to or returned from a non-variadic function with the VFP
3710 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3711 *BASE_TYPE to the base type for T and *COUNT to the number of
3712 elements of that base type before returning. */
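/* For example, an array of four doubles is a candidate (VFP_CPRC_DOUBLE,
   count 4), but an array of five doubles exceeds the four-element limit
   and is passed according to the base ABI instead. */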
3713
3714 static int
3715 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3716 int *count)
3717 {
3718 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3719 int c = arm_vfp_cprc_sub_candidate (t, &b);
3720 if (c <= 0 || c > 4)
3721 return 0;
3722 *base_type = b;
3723 *count = c;
3724 return 1;
3725 }
3726
3727 /* Return 1 if the VFP ABI should be used for passing arguments to and
3728 returning values from a function of type FUNC_TYPE, 0
3729 otherwise. */
3730
3731 static int
3732 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3733 {
3734 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3735 /* Variadic functions always use the base ABI. Assume that functions
3736 without debug info are not variadic. */
3737 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3738 return 0;
3739 /* The VFP ABI is only supported as a variant of AAPCS. */
3740 if (tdep->arm_abi != ARM_ABI_AAPCS)
3741 return 0;
3742 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3743 }
3744
3745 /* We currently only support passing parameters in integer registers, which
3746 conforms with GCC's default model, and VFP argument passing following
3747 the VFP variant of AAPCS. Several other variants exist and
3748 we should probably support some of them based on the selected ABI. */
3749
3750 static CORE_ADDR
3751 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3752 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3753 struct value **args, CORE_ADDR sp, int struct_return,
3754 CORE_ADDR struct_addr)
3755 {
3756 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3757 int argnum;
3758 int argreg;
3759 int nstack;
3760 struct stack_item *si = NULL;
3761 int use_vfp_abi;
3762 struct type *ftype;
3763 unsigned vfp_regs_free = (1 << 16) - 1;
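/* One bit per single-precision VFP argument register (s0-s15); a double
   or quad candidate consumes two or four consecutive bits respectively. */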
3764
3765 /* Determine the type of this function and whether the VFP ABI
3766 applies. */
3767 ftype = check_typedef (value_type (function));
3768 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3769 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3770 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3771
3772 /* Set the return address. For the ARM, the return breakpoint is
3773 always at BP_ADDR. */
3774 if (arm_pc_is_thumb (gdbarch, bp_addr))
3775 bp_addr |= 1;
3776 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3777
3778 /* Walk through the list of args and determine how large a temporary
3779 stack is required. Need to take care here as structs may be
3780 passed on the stack, and we have to push them. */
3781 nstack = 0;
3782
3783 argreg = ARM_A1_REGNUM;
3784 nstack = 0;
3785
3786 /* The struct_return pointer occupies the first parameter
3787 passing register. */
3788 if (struct_return)
3789 {
3790 if (arm_debug)
3791 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3792 gdbarch_register_name (gdbarch, argreg),
3793 paddress (gdbarch, struct_addr));
3794 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3795 argreg++;
3796 }
3797
3798 for (argnum = 0; argnum < nargs; argnum++)
3799 {
3800 int len;
3801 struct type *arg_type;
3802 struct type *target_type;
3803 enum type_code typecode;
3804 const bfd_byte *val;
3805 int align;
3806 enum arm_vfp_cprc_base_type vfp_base_type;
3807 int vfp_base_count;
3808 int may_use_core_reg = 1;
3809
3810 arg_type = check_typedef (value_type (args[argnum]));
3811 len = TYPE_LENGTH (arg_type);
3812 target_type = TYPE_TARGET_TYPE (arg_type);
3813 typecode = TYPE_CODE (arg_type);
3814 val = value_contents (args[argnum]);
3815
3816 align = arm_type_align (arg_type);
3817 /* Round alignment up to a whole number of words. */
3818 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3819 /* Different ABIs have different maximum alignments. */
3820 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3821 {
3822 /* The APCS ABI only requires word alignment. */
3823 align = INT_REGISTER_SIZE;
3824 }
3825 else
3826 {
3827 /* The AAPCS requires at most doubleword alignment. */
3828 if (align > INT_REGISTER_SIZE * 2)
3829 align = INT_REGISTER_SIZE * 2;
3830 }
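/* For example, a struct containing a 64-bit integer gets an alignment
   of 8 under AAPCS, so the code below skips to an even-numbered core
   register and pads the stack to a doubleword boundary before passing
   it; under APCS it is only word aligned. */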
3831
3832 if (use_vfp_abi
3833 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3834 &vfp_base_count))
3835 {
3836 int regno;
3837 int unit_length;
3838 int shift;
3839 unsigned mask;
3840
3841 /* Because this is a CPRC it cannot go in a core register or
3842 cause a core register to be skipped for alignment.
3843 Either it goes in VFP registers and the rest of this loop
3844 iteration is skipped for this argument, or it goes on the
3845 stack (and the stack alignment code is correct for this
3846 case). */
3847 may_use_core_reg = 0;
3848
3849 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3850 shift = unit_length / 4;
3851 mask = (1 << (shift * vfp_base_count)) - 1;
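/* For example, a candidate of two doubles gives unit_length 8, shift 2
   and mask 0xf, so the loop below looks for four consecutive free
   single-precision slots starting on an even s-register boundary,
   i.e. a free pair of d-registers. */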
3852 for (regno = 0; regno < 16; regno += shift)
3853 if (((vfp_regs_free >> regno) & mask) == mask)
3854 break;
3855
3856 if (regno < 16)
3857 {
3858 int reg_char;
3859 int reg_scaled;
3860 int i;
3861
3862 vfp_regs_free &= ~(mask << regno);
3863 reg_scaled = regno / shift;
3864 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3865 for (i = 0; i < vfp_base_count; i++)
3866 {
3867 char name_buf[4];
3868 int regnum;
3869 if (reg_char == 'q')
3870 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3871 val + i * unit_length);
3872 else
3873 {
3874 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3875 reg_char, reg_scaled + i);
3876 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3877 strlen (name_buf));
3878 regcache_cooked_write (regcache, regnum,
3879 val + i * unit_length);
3880 }
3881 }
3882 continue;
3883 }
3884 else
3885 {
3886 /* This CPRC could not go in VFP registers, so all VFP
3887 registers are now marked as used. */
3888 vfp_regs_free = 0;
3889 }
3890 }
3891
3892 /* Push stack padding for doubleword alignment. */
3893 if (nstack & (align - 1))
3894 {
3895 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3896 nstack += INT_REGISTER_SIZE;
3897 }
3898
3899 /* Doubleword aligned quantities must go in even register pairs. */
3900 if (may_use_core_reg
3901 && argreg <= ARM_LAST_ARG_REGNUM
3902 && align > INT_REGISTER_SIZE
3903 && argreg & 1)
3904 argreg++;
3905
3906 /* If the argument is a pointer to a function, and it is a
3907 Thumb function, create a LOCAL copy of the value and set
3908 the THUMB bit in it. */
3909 if (TYPE_CODE_PTR == typecode
3910 && target_type != NULL
3911 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3912 {
3913 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3914 if (arm_pc_is_thumb (gdbarch, regval))
3915 {
3916 bfd_byte *copy = alloca (len);
3917 store_unsigned_integer (copy, len, byte_order,
3918 MAKE_THUMB_ADDR (regval));
3919 val = copy;
3920 }
3921 }
3922
3923 /* Copy the argument to general registers or the stack in
3924 register-sized pieces. Large arguments are split between
3925 registers and stack. */
3926 while (len > 0)
3927 {
3928 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3929
3930 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3931 {
3932 /* The argument is being passed in a general purpose
3933 register. */
3934 CORE_ADDR regval
3935 = extract_unsigned_integer (val, partial_len, byte_order);
3936 if (byte_order == BFD_ENDIAN_BIG)
3937 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3938 if (arm_debug)
3939 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3940 argnum,
3941 gdbarch_register_name
3942 (gdbarch, argreg),
3943 phex (regval, INT_REGISTER_SIZE));
3944 regcache_cooked_write_unsigned (regcache, argreg, regval);
3945 argreg++;
3946 }
3947 else
3948 {
3949 /* Push the arguments onto the stack. */
3950 if (arm_debug)
3951 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3952 argnum, nstack);
3953 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3954 nstack += INT_REGISTER_SIZE;
3955 }
3956
3957 len -= partial_len;
3958 val += partial_len;
3959 }
3960 }
3961 /* If we have an odd number of words to push, then decrement the stack
3962 by one word now, so that the first stack argument is doubleword aligned. */
3963 if (nstack & 4)
3964 sp -= 4;
3965
3966 while (si)
3967 {
3968 sp -= si->len;
3969 write_memory (sp, si->data, si->len);
3970 si = pop_stack_item (si);
3971 }
3972
3973 /* Finally, update the SP register. */
3974 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3975
3976 return sp;
3977 }
3978
3979
3980 /* Always align the frame to an 8-byte boundary. This is required on
3981 some platforms and harmless on the rest. */
3982
3983 static CORE_ADDR
3984 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3985 {
3986 /* Align the stack to eight bytes. */
3987 return sp & ~ (CORE_ADDR) 7;
3988 }
3989
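/* Print the FPA status flag bits: IVO (invalid operation), DVZ
   (division by zero), OFL (overflow), UFL (underflow) and INX
   (inexact). */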
3990 static void
3991 print_fpu_flags (struct ui_file *file, int flags)
3992 {
3993 if (flags & (1 << 0))
3994 fputs_filtered ("IVO ", file);
3995 if (flags & (1 << 1))
3996 fputs_filtered ("DVZ ", file);
3997 if (flags & (1 << 2))
3998 fputs_filtered ("OFL ", file);
3999 if (flags & (1 << 3))
4000 fputs_filtered ("UFL ", file);
4001 if (flags & (1 << 4))
4002 fputs_filtered ("INX ", file);
4003 fputc_filtered ('\n', file);
4004 }
4005
4006 /* Print interesting information about the floating point processor
4007 (if present) or emulator. */
4008 static void
4009 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4010 struct frame_info *frame, const char *args)
4011 {
4012 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4013 int type;
4014
4015 type = (status >> 24) & 127;
4016 if (status & (1 << 31))
4017 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
4018 else
4019 fprintf_filtered (file, _("Software FPU type %d\n"), type);
4020 /* i18n: [floating point unit] mask */
4021 fputs_filtered (_("mask: "), file);
4022 print_fpu_flags (file, status >> 16);
4023 /* i18n: [floating point unit] flags */
4024 fputs_filtered (_("flags: "), file);
4025 print_fpu_flags (file, status);
4026 }
4027
4028 /* Construct the ARM extended floating point type. */
4029 static struct type *
4030 arm_ext_type (struct gdbarch *gdbarch)
4031 {
4032 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4033
4034 if (!tdep->arm_ext_type)
4035 tdep->arm_ext_type
4036 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4037 floatformats_arm_ext);
4038
4039 return tdep->arm_ext_type;
4040 }
4041
4042 static struct type *
4043 arm_neon_double_type (struct gdbarch *gdbarch)
4044 {
4045 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4046
4047 if (tdep->neon_double_type == NULL)
4048 {
4049 struct type *t, *elem;
4050
4051 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4052 TYPE_CODE_UNION);
4053 elem = builtin_type (gdbarch)->builtin_uint8;
4054 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4055 elem = builtin_type (gdbarch)->builtin_uint16;
4056 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4057 elem = builtin_type (gdbarch)->builtin_uint32;
4058 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4059 elem = builtin_type (gdbarch)->builtin_uint64;
4060 append_composite_type_field (t, "u64", elem);
4061 elem = builtin_type (gdbarch)->builtin_float;
4062 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4063 elem = builtin_type (gdbarch)->builtin_double;
4064 append_composite_type_field (t, "f64", elem);
4065
4066 TYPE_VECTOR (t) = 1;
4067 TYPE_NAME (t) = "neon_d";
4068 tdep->neon_double_type = t;
4069 }
4070
4071 return tdep->neon_double_type;
4072 }
4073
4074 /* FIXME: The vector types are not correctly ordered on big-endian
4075 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4076 bits of d0 - regardless of what unit size is being held in d0. So
4077 the offset of the first uint8 in d0 is 7, but the offset of the
4078 first float is 4. This code works as-is for little-endian
4079 targets. */
4080
4081 static struct type *
4082 arm_neon_quad_type (struct gdbarch *gdbarch)
4083 {
4084 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4085
4086 if (tdep->neon_quad_type == NULL)
4087 {
4088 struct type *t, *elem;
4089
4090 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4091 TYPE_CODE_UNION);
4092 elem = builtin_type (gdbarch)->builtin_uint8;
4093 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4094 elem = builtin_type (gdbarch)->builtin_uint16;
4095 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4096 elem = builtin_type (gdbarch)->builtin_uint32;
4097 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4098 elem = builtin_type (gdbarch)->builtin_uint64;
4099 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4100 elem = builtin_type (gdbarch)->builtin_float;
4101 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4102 elem = builtin_type (gdbarch)->builtin_double;
4103 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4104
4105 TYPE_VECTOR (t) = 1;
4106 TYPE_NAME (t) = "neon_q";
4107 tdep->neon_quad_type = t;
4108 }
4109
4110 return tdep->neon_quad_type;
4111 }
4112
4113 /* Return the GDB type object for the "standard" data type of data in
4114 register N. */
4115
4116 static struct type *
4117 arm_register_type (struct gdbarch *gdbarch, int regnum)
4118 {
4119 int num_regs = gdbarch_num_regs (gdbarch);
4120
4121 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4122 && regnum >= num_regs && regnum < num_regs + 32)
4123 return builtin_type (gdbarch)->builtin_float;
4124
4125 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4126 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4127 return arm_neon_quad_type (gdbarch);
4128
4129 /* If the target description has register information, we are only
4130 in this function so that we can override the types of
4131 double-precision registers for NEON. */
4132 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4133 {
4134 struct type *t = tdesc_register_type (gdbarch, regnum);
4135
4136 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4137 && TYPE_CODE (t) == TYPE_CODE_FLT
4138 && gdbarch_tdep (gdbarch)->have_neon)
4139 return arm_neon_double_type (gdbarch);
4140 else
4141 return t;
4142 }
4143
4144 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4145 {
4146 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4147 return builtin_type (gdbarch)->builtin_void;
4148
4149 return arm_ext_type (gdbarch);
4150 }
4151 else if (regnum == ARM_SP_REGNUM)
4152 return builtin_type (gdbarch)->builtin_data_ptr;
4153 else if (regnum == ARM_PC_REGNUM)
4154 return builtin_type (gdbarch)->builtin_func_ptr;
4155 else if (regnum >= ARRAY_SIZE (arm_register_names))
4156 /* These registers are only supported on targets which supply
4157 an XML description. */
4158 return builtin_type (gdbarch)->builtin_int0;
4159 else
4160 return builtin_type (gdbarch)->builtin_uint32;
4161 }
4162
4163 /* Map a DWARF register REGNUM onto the appropriate GDB register
4164 number. */
4165
4166 static int
4167 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4168 {
4169 /* Core integer regs. */
4170 if (reg >= 0 && reg <= 15)
4171 return reg;
4172
4173 /* Legacy FPA encoding. These were once used in a way which
4174 overlapped with VFP register numbering, so their use is
4175 discouraged, but GDB doesn't support the ARM toolchain
4176 which used them for VFP. */
4177 if (reg >= 16 && reg <= 23)
4178 return ARM_F0_REGNUM + reg - 16;
4179
4180 /* New assignments for the FPA registers. */
4181 if (reg >= 96 && reg <= 103)
4182 return ARM_F0_REGNUM + reg - 96;
4183
4184 /* WMMX register assignments. */
4185 if (reg >= 104 && reg <= 111)
4186 return ARM_WCGR0_REGNUM + reg - 104;
4187
4188 if (reg >= 112 && reg <= 127)
4189 return ARM_WR0_REGNUM + reg - 112;
4190
4191 if (reg >= 192 && reg <= 199)
4192 return ARM_WC0_REGNUM + reg - 192;
4193
4194 /* VFP v2 registers. A double precision value is actually
4195 in d1 rather than s2, but the ABI only defines numbering
4196 for the single precision registers. This will "just work"
4197 in GDB for little endian targets (we'll read eight bytes,
4198 starting in s0 and then progressing to s1), but will be
4199 reversed on big endian targets with VFP. This won't
4200 be a problem for the new Neon quad registers; you're supposed
4201 to use DW_OP_piece for those. */
4202 if (reg >= 64 && reg <= 95)
4203 {
4204 char name_buf[4];
4205
4206 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4207 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4208 strlen (name_buf));
4209 }
4210
4211 /* VFP v3 / Neon registers. This range is also used for VFP v2
4212 registers, except that it now describes d0 instead of s0. */
4213 if (reg >= 256 && reg <= 287)
4214 {
4215 char name_buf[4];
4216
4217 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4218 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4219 strlen (name_buf));
4220 }
4221
4222 return -1;
4223 }
4224
4225 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4226 static int
4227 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4228 {
4229 int reg = regnum;
4230 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4231
4232 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4233 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4234
4235 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4236 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4237
4238 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4239 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4240
4241 if (reg < NUM_GREGS)
4242 return SIM_ARM_R0_REGNUM + reg;
4243 reg -= NUM_GREGS;
4244
4245 if (reg < NUM_FREGS)
4246 return SIM_ARM_FP0_REGNUM + reg;
4247 reg -= NUM_FREGS;
4248
4249 if (reg < NUM_SREGS)
4250 return SIM_ARM_FPS_REGNUM + reg;
4251 reg -= NUM_SREGS;
4252
4253 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4254 }
4255
4256 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4257 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4258 It is thought that this is the floating-point register format on
4259 little-endian systems. */
4260
4261 static void
4262 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4263 void *dbl, int endianess)
4264 {
4265 DOUBLEST d;
4266
4267 if (endianess == BFD_ENDIAN_BIG)
4268 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4269 else
4270 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4271 ptr, &d);
4272 floatformat_from_doublest (fmt, &d, dbl);
4273 }
4274
4275 static void
4276 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4277 int endianess)
4278 {
4279 DOUBLEST d;
4280
4281 floatformat_to_doublest (fmt, ptr, &d);
4282 if (endianess == BFD_ENDIAN_BIG)
4283 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4284 else
4285 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4286 &d, dbl);
4287 }
4288
4289 static int
4290 condition_true (unsigned long cond, unsigned long status_reg)
4291 {
4292 if (cond == INST_AL || cond == INST_NV)
4293 return 1;
4294
4295 switch (cond)
4296 {
4297 case INST_EQ:
4298 return ((status_reg & FLAG_Z) != 0);
4299 case INST_NE:
4300 return ((status_reg & FLAG_Z) == 0);
4301 case INST_CS:
4302 return ((status_reg & FLAG_C) != 0);
4303 case INST_CC:
4304 return ((status_reg & FLAG_C) == 0);
4305 case INST_MI:
4306 return ((status_reg & FLAG_N) != 0);
4307 case INST_PL:
4308 return ((status_reg & FLAG_N) == 0);
4309 case INST_VS:
4310 return ((status_reg & FLAG_V) != 0);
4311 case INST_VC:
4312 return ((status_reg & FLAG_V) == 0);
4313 case INST_HI:
4314 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4315 case INST_LS:
4316 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4317 case INST_GE:
4318 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4319 case INST_LT:
4320 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4321 case INST_GT:
4322 return (((status_reg & FLAG_Z) == 0)
4323 && (((status_reg & FLAG_N) == 0)
4324 == ((status_reg & FLAG_V) == 0)));
4325 case INST_LE:
4326 return (((status_reg & FLAG_Z) != 0)
4327 || (((status_reg & FLAG_N) == 0)
4328 != ((status_reg & FLAG_V) == 0)));
4329 }
4330 return 1;
4331 }
4332
4333 static unsigned long
4334 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4335 unsigned long pc_val, unsigned long status_reg)
4336 {
4337 unsigned long res, shift;
4338 int rm = bits (inst, 0, 3);
4339 unsigned long shifttype = bits (inst, 5, 6);
4340
4341 if (bit (inst, 4))
4342 {
4343 int rs = bits (inst, 8, 11);
4344 shift = (rs == 15 ? pc_val + 8
4345 : get_frame_register_unsigned (frame, rs)) & 0xFF;
4346 }
4347 else
4348 shift = bits (inst, 7, 11);
4349
4350 res = (rm == ARM_PC_REGNUM
4351 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4352 : get_frame_register_unsigned (frame, rm));
4353
4354 switch (shifttype)
4355 {
4356 case 0: /* LSL */
4357 res = shift >= 32 ? 0 : res << shift;
4358 break;
4359
4360 case 1: /* LSR */
4361 res = shift >= 32 ? 0 : res >> shift;
4362 break;
4363
4364 case 2: /* ASR */
4365 if (shift >= 32)
4366 shift = 31;
4367 res = ((res & 0x80000000L)
4368 ? ~((~res) >> shift) : res >> shift);
4369 break;
4370
4371 case 3: /* ROR/RRX */
4372 shift &= 31;
4373 if (shift == 0)
4374 res = (res >> 1) | (carry ? 0x80000000L : 0);
4375 else
4376 res = (res >> shift) | (res << (32 - shift));
4377 break;
4378 }
4379
4380 return res & 0xffffffff;
4381 }
4382
4383 /* Return number of 1-bits in VAL. */
4384
4385 static int
4386 bitcount (unsigned long val)
4387 {
4388 int nbits;
4389 for (nbits = 0; val != 0; nbits++)
4390 val &= val - 1; /* Delete rightmost 1-bit in val. */
4391 return nbits;
4392 }
4393
4394 /* Return the size in bytes of the complete Thumb instruction whose
4395 first halfword is INST1. */
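/* For example, 0xf7ff (the first halfword of a 32-bit BL) yields 4,
   while 0xe7fe (a 16-bit unconditional branch) yields 2. */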
4396
4397 static int
4398 thumb_insn_size (unsigned short inst1)
4399 {
4400 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
4401 return 4;
4402 else
4403 return 2;
4404 }
4405
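/* Advance the IT state by one instruction. For example, "ITE EQ"
   leaves an ITSTATE of 0x0c: the first conditional instruction tests
   condition 0x0 (EQ, i.e. ITSTATE >> 4); one advance yields 0x18, so
   the second instruction tests 0x1 (NE); a further advance clears the
   state, ending the block. */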
4406 static int
4407 thumb_advance_itstate (unsigned int itstate)
4408 {
4409 /* Preserve IT[7:5], the first three bits of the condition. Shift
4410 the upcoming condition flags left by one bit. */
4411 itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
4412
4413 /* If we have finished the IT block, clear the state. */
4414 if ((itstate & 0x0f) == 0)
4415 itstate = 0;
4416
4417 return itstate;
4418 }
4419
4420 /* Find the next PC after the current instruction executes. In some
4421 cases we can not statically determine the answer (see the IT state
4422 handling in this function); in that case, a breakpoint may be
4423 inserted in addition to the returned PC, which will be used to set
4424 another breakpoint by our caller. */
4425
4426 static CORE_ADDR
4427 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4428 {
4429 struct gdbarch *gdbarch = get_frame_arch (frame);
4430 struct address_space *aspace = get_frame_address_space (frame);
4431 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4432 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4433 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
4434 unsigned short inst1;
4435 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
4436 unsigned long offset;
4437 ULONGEST status, itstate;
4438
4439 nextpc = MAKE_THUMB_ADDR (nextpc);
4440 pc_val = MAKE_THUMB_ADDR (pc_val);
4441
4442 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
4443
4444 /* Thumb-2 conditional execution support. There are eight bits in
4445 the CPSR which describe conditional execution state. Once
4446 reconstructed (they're in a funny order), the low five bits
4447 describe the low bit of the condition for each instruction and
4448 how many instructions remain. The high three bits describe the
4449 base condition. One of the low four bits will be set if an IT
4450 block is active. These bits read as zero on earlier
4451 processors. */
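/* The IT bits live in CPSR bits [26:25] (IT[1:0]) and [15:10]
   (IT[7:2]); the expression below reassembles them into a single
   ITSTATE byte. */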
4452 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4453 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
4454
4455 /* If-Then handling. On GNU/Linux, where this routine is used, we
4456 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4457 can disable execution of the undefined instruction. So we might
4458 miss the breakpoint if we set it on a skipped conditional
4459 instruction. Because conditional instructions can change the
4460 flags, affecting the execution of further instructions, we may
4461 need to set two breakpoints. */
4462
4463 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4464 {
4465 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4466 {
4467 /* An IT instruction. Because this instruction does not
4468 modify the flags, we can accurately predict the next
4469 executed instruction. */
4470 itstate = inst1 & 0x00ff;
4471 pc += thumb_insn_size (inst1);
4472
4473 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4474 {
4475 inst1 = read_memory_unsigned_integer (pc, 2,
4476 byte_order_for_code);
4477 pc += thumb_insn_size (inst1);
4478 itstate = thumb_advance_itstate (itstate);
4479 }
4480
4481 return MAKE_THUMB_ADDR (pc);
4482 }
4483 else if (itstate != 0)
4484 {
4485 /* We are in a conditional block. Check the condition. */
4486 if (! condition_true (itstate >> 4, status))
4487 {
4488 /* Advance to the next executed instruction. */
4489 pc += thumb_insn_size (inst1);
4490 itstate = thumb_advance_itstate (itstate);
4491
4492 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4493 {
4494 inst1 = read_memory_unsigned_integer (pc, 2,
4495 byte_order_for_code);
4496 pc += thumb_insn_size (inst1);
4497 itstate = thumb_advance_itstate (itstate);
4498 }
4499
4500 return MAKE_THUMB_ADDR (pc);
4501 }
4502 else if ((itstate & 0x0f) == 0x08)
4503 {
4504 /* This is the last instruction of the conditional
4505 block, and it is executed. We can handle it normally
4506 because the following instruction is not conditional,
4507 and we must handle it normally because it is
4508 permitted to branch. Fall through. */
4509 }
4510 else
4511 {
4512 int cond_negated;
4513
4514 /* There are conditional instructions after this one.
4515 If this instruction modifies the flags, then we can
4516 not predict what the next executed instruction will
4517 be. Fortunately, this instruction is architecturally
4518 forbidden to branch; we know it will fall through.
4519 Start by skipping past it. */
4520 pc += thumb_insn_size (inst1);
4521 itstate = thumb_advance_itstate (itstate);
4522
4523 /* Set a breakpoint on the following instruction. */
4524 gdb_assert ((itstate & 0x0f) != 0);
4525 arm_insert_single_step_breakpoint (gdbarch, aspace,
4526 MAKE_THUMB_ADDR (pc));
4527 cond_negated = (itstate >> 4) & 1;
4528
4529 /* Skip all following instructions with the same
4530 condition. If there is a later instruction in the IT
4531 block with the opposite condition, set the other
4532 breakpoint there. If not, then set a breakpoint on
4533 the instruction after the IT block. */
4534 do
4535 {
4536 inst1 = read_memory_unsigned_integer (pc, 2,
4537 byte_order_for_code);
4538 pc += thumb_insn_size (inst1);
4539 itstate = thumb_advance_itstate (itstate);
4540 }
4541 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4542
4543 return MAKE_THUMB_ADDR (pc);
4544 }
4545 }
4546 }
4547 else if (itstate & 0x0f)
4548 {
4549 /* We are in a conditional block. Check the condition. */
4550 int cond = itstate >> 4;
4551
4552 if (! condition_true (cond, status))
4553 /* Advance to the next instruction. All the 32-bit
4554 instructions share a common prefix. */
4555 return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
4556
4557 /* Otherwise, handle the instruction normally. */
4558 }
4559
4560 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
4561 {
4562 CORE_ADDR sp;
4563
4564 /* Fetch the saved PC from the stack. It's stored above
4565 all of the other registers. */
4566 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
4567 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
4568 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
4569 }
4570 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
4571 {
4572 unsigned long cond = bits (inst1, 8, 11);
4573 if (cond == 0x0f) /* 0x0f = SWI */
4574 {
4575 struct gdbarch_tdep *tdep;
4576 tdep = gdbarch_tdep (gdbarch);
4577
4578 if (tdep->syscall_next_pc != NULL)
4579 nextpc = tdep->syscall_next_pc (frame);
4580
4581 }
4582 else if (cond != 0x0f && condition_true (cond, status))
4583 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4584 }
4585 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
4586 {
4587 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4588 }
4589 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
4590 {
4591 unsigned short inst2;
4592 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
4593
4594 /* Default to the next instruction. */
4595 nextpc = pc + 4;
4596 nextpc = MAKE_THUMB_ADDR (nextpc);
4597
4598 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4599 {
4600 /* Branches and miscellaneous control instructions. */
4601
4602 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4603 {
4604 /* B, BL, BLX. */
4605 int j1, j2, imm1, imm2;
4606
4607 imm1 = sbits (inst1, 0, 10);
4608 imm2 = bits (inst2, 0, 10);
4609 j1 = bit (inst2, 13);
4610 j2 = bit (inst2, 11);
4611
4612 offset = ((imm1 << 12) + (imm2 << 1));
4613 offset ^= ((!j2) << 22) | ((!j1) << 23);
4614
4615 nextpc = pc_val + offset;
4616 /* For BLX make sure to clear the low bits. */
4617 if (bit (inst2, 12) == 0)
4618 nextpc = nextpc & 0xfffffffc;
4619 }
4620 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4621 {
4622 /* SUBS PC, LR, #imm8. */
4623 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4624 nextpc -= inst2 & 0x00ff;
4625 }
4626 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
4627 {
4628 /* Conditional branch. */
4629 if (condition_true (bits (inst1, 6, 9), status))
4630 {
4631 int sign, j1, j2, imm1, imm2;
4632
4633 sign = sbits (inst1, 10, 10);
4634 imm1 = bits (inst1, 0, 5);
4635 imm2 = bits (inst2, 0, 10);
4636 j1 = bit (inst2, 13);
4637 j2 = bit (inst2, 11);
4638
4639 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4640 offset += (imm1 << 12) + (imm2 << 1);
4641
4642 nextpc = pc_val + offset;
4643 }
4644 }
4645 }
4646 else if ((inst1 & 0xfe50) == 0xe810)
4647 {
4648 /* Load multiple or RFE. */
4649 int rn, offset, load_pc = 1;
4650
4651 rn = bits (inst1, 0, 3);
4652 if (bit (inst1, 7) && !bit (inst1, 8))
4653 {
4654 /* LDMIA or POP */
4655 if (!bit (inst2, 15))
4656 load_pc = 0;
4657 offset = bitcount (inst2) * 4 - 4;
4658 }
4659 else if (!bit (inst1, 7) && bit (inst1, 8))
4660 {
4661 /* LDMDB */
4662 if (!bit (inst2, 15))
4663 load_pc = 0;
4664 offset = -4;
4665 }
4666 else if (bit (inst1, 7) && bit (inst1, 8))
4667 {
4668 /* RFEIA */
4669 offset = 0;
4670 }
4671 else if (!bit (inst1, 7) && !bit (inst1, 8))
4672 {
4673 /* RFEDB */
4674 offset = -8;
4675 }
4676 else
4677 load_pc = 0;
4678
4679 if (load_pc)
4680 {
4681 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4682 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4683 }
4684 }
4685 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4686 {
4687 /* MOV PC or MOVS PC. */
4688 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4689 nextpc = MAKE_THUMB_ADDR (nextpc);
4690 }
4691 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4692 {
4693 /* LDR PC. */
4694 CORE_ADDR base;
4695 int rn, load_pc = 1;
4696
4697 rn = bits (inst1, 0, 3);
4698 base = get_frame_register_unsigned (frame, rn);
4699 if (rn == ARM_PC_REGNUM)
4700 {
4701 base = (base + 4) & ~(CORE_ADDR) 0x3;
4702 if (bit (inst1, 7))
4703 base += bits (inst2, 0, 11);
4704 else
4705 base -= bits (inst2, 0, 11);
4706 }
4707 else if (bit (inst1, 7))
4708 base += bits (inst2, 0, 11);
4709 else if (bit (inst2, 11))
4710 {
4711 if (bit (inst2, 10))
4712 {
4713 if (bit (inst2, 9))
4714 base += bits (inst2, 0, 7);
4715 else
4716 base -= bits (inst2, 0, 7);
4717 }
4718 }
4719 else if ((inst2 & 0x0fc0) == 0x0000)
4720 {
4721 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4722 base += get_frame_register_unsigned (frame, rm) << shift;
4723 }
4724 else
4725 /* Reserved. */
4726 load_pc = 0;
4727
4728 if (load_pc)
4729 nextpc = get_frame_memory_unsigned (frame, base, 4);
4730 }
4731 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
4732 {
4733 /* TBB. */
4734 CORE_ADDR tbl_reg, table, offset, length;
4735
4736 tbl_reg = bits (inst1, 0, 3);
4737 if (tbl_reg == 0x0f)
4738 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4739 else
4740 table = get_frame_register_unsigned (frame, tbl_reg);
4741
4742 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4743 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4744 nextpc = pc_val + length;
4745 }
4746 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
4747 {
4748 /* TBH. */
4749 CORE_ADDR tbl_reg, table, offset, length;
4750
4751 tbl_reg = bits (inst1, 0, 3);
4752 if (tbl_reg == 0x0f)
4753 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4754 else
4755 table = get_frame_register_unsigned (frame, tbl_reg);
4756
4757 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4758 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4759 nextpc = pc_val + length;
4760 }
4761 }
4762 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
4763 {
4764 if (bits (inst1, 3, 6) == 0x0f)
4765 nextpc = UNMAKE_THUMB_ADDR (pc_val);
4766 else
4767 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4768 }
4769 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
4770 {
4771 if (bits (inst1, 3, 6) == 0x0f)
4772 nextpc = pc_val;
4773 else
4774 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4775
4776 nextpc = MAKE_THUMB_ADDR (nextpc);
4777 }
4778 else if ((inst1 & 0xf500) == 0xb100)
4779 {
4780 /* CBNZ or CBZ. */
4781 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4782 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4783
4784 if (bit (inst1, 11) && reg != 0)
4785 nextpc = pc_val + imm;
4786 else if (!bit (inst1, 11) && reg == 0)
4787 nextpc = pc_val + imm;
4788 }
4789 return nextpc;
4790 }
4791
4792 /* Get the raw next address. PC is the current program counter, in
4793 FRAME, which is assumed to be executing in ARM mode.
4794
4795 The value returned has the execution state of the next instruction
4796 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4797 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4798 address. */
4799
4800 static CORE_ADDR
4801 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4802 {
4803 struct gdbarch *gdbarch = get_frame_arch (frame);
4804 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4805 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4806 unsigned long pc_val;
4807 unsigned long this_instr;
4808 unsigned long status;
4809 CORE_ADDR nextpc;
4810
4811 pc_val = (unsigned long) pc;
4812 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4813
4814 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4815 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
4816
4817 if (bits (this_instr, 28, 31) == INST_NV)
4818 switch (bits (this_instr, 24, 27))
4819 {
4820 case 0xa:
4821 case 0xb:
4822 {
4823 /* Branch with Link and change to Thumb. */
4824 nextpc = BranchDest (pc, this_instr);
4825 nextpc |= bit (this_instr, 24) << 1;
4826 nextpc = MAKE_THUMB_ADDR (nextpc);
4827 break;
4828 }
4829 case 0xc:
4830 case 0xd:
4831 case 0xe:
4832 /* Coprocessor register transfer. */
4833 if (bits (this_instr, 12, 15) == 15)
4834 error (_("Invalid update to pc in instruction"));
4835 break;
4836 }
4837 else if (condition_true (bits (this_instr, 28, 31), status))
4838 {
4839 switch (bits (this_instr, 24, 27))
4840 {
4841 case 0x0:
4842 case 0x1: /* data processing */
4843 case 0x2:
4844 case 0x3:
4845 {
4846 unsigned long operand1, operand2, result = 0;
4847 unsigned long rn;
4848 int c;
4849
4850 if (bits (this_instr, 12, 15) != 15)
4851 break;
4852
4853 if (bits (this_instr, 22, 25) == 0
4854 && bits (this_instr, 4, 7) == 9) /* multiply */
4855 error (_("Invalid update to pc in instruction"));
4856
4857 /* BX <reg>, BLX <reg> */
4858 if (bits (this_instr, 4, 27) == 0x12fff1
4859 || bits (this_instr, 4, 27) == 0x12fff3)
4860 {
4861 rn = bits (this_instr, 0, 3);
4862 nextpc = ((rn == ARM_PC_REGNUM)
4863 ? (pc_val + 8)
4864 : get_frame_register_unsigned (frame, rn));
4865
4866 return nextpc;
4867 }
4868
4869 /* Data processing instruction with the PC as destination. */
4870 c = (status & FLAG_C) ? 1 : 0;
4871 rn = bits (this_instr, 16, 19);
4872 operand1 = ((rn == ARM_PC_REGNUM)
4873 ? (pc_val + 8)
4874 : get_frame_register_unsigned (frame, rn));
4875
4876 if (bit (this_instr, 25))
4877 {
4878 unsigned long immval = bits (this_instr, 0, 7);
4879 unsigned long rotate = 2 * bits (this_instr, 8, 11);
4880 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4881 & 0xffffffff;
4882 }
4883 else /* operand 2 is a shifted register. */
4884 operand2 = shifted_reg_val (frame, this_instr, c,
4885 pc_val, status);
4886
4887 switch (bits (this_instr, 21, 24))
4888 {
4889 case 0x0: /*and */
4890 result = operand1 & operand2;
4891 break;
4892
4893 case 0x1: /*eor */
4894 result = operand1 ^ operand2;
4895 break;
4896
4897 case 0x2: /*sub */
4898 result = operand1 - operand2;
4899 break;
4900
4901 case 0x3: /*rsb */
4902 result = operand2 - operand1;
4903 break;
4904
4905 case 0x4: /*add */
4906 result = operand1 + operand2;
4907 break;
4908
4909 case 0x5: /*adc */
4910 result = operand1 + operand2 + c;
4911 break;
4912
4913 case 0x6: /*sbc */
4914 result = operand1 - operand2 + c;
4915 break;
4916
4917 case 0x7: /*rsc */
4918 result = operand2 - operand1 + c;
4919 break;
4920
4921 case 0x8:
4922 case 0x9:
4923 case 0xa:
4924 case 0xb: /* tst, teq, cmp, cmn */
4925 result = (unsigned long) nextpc;
4926 break;
4927
4928 case 0xc: /*orr */
4929 result = operand1 | operand2;
4930 break;
4931
4932 case 0xd: /*mov */
4933 /* Always step into a function. */
4934 result = operand2;
4935 break;
4936
4937 case 0xe: /*bic */
4938 result = operand1 & ~operand2;
4939 break;
4940
4941 case 0xf: /*mvn */
4942 result = ~operand2;
4943 break;
4944 }
4945
4946 /* In 26-bit APCS the bottom two bits of the result are
4947 ignored, and we always end up in ARM state. */
4948 if (!arm_apcs_32)
4949 nextpc = arm_addr_bits_remove (gdbarch, result);
4950 else
4951 nextpc = result;
4952
4953 break;
4954 }
4955
4956 case 0x4:
4957 case 0x5: /* data transfer */
4958 case 0x6:
4959 case 0x7:
4960 if (bit (this_instr, 20))
4961 {
4962 /* load */
4963 if (bits (this_instr, 12, 15) == 15)
4964 {
4965 /* rd == pc */
4966 unsigned long rn;
4967 unsigned long base;
4968
4969 if (bit (this_instr, 22))
4970 error (_("Invalid update to pc in instruction"));
4971
4972 /* byte write to PC */
4973 rn = bits (this_instr, 16, 19);
4974 base = ((rn == ARM_PC_REGNUM)
4975 ? (pc_val + 8)
4976 : get_frame_register_unsigned (frame, rn));
4977
4978 if (bit (this_instr, 24))
4979 {
4980 /* pre-indexed */
4981 int c = (status & FLAG_C) ? 1 : 0;
4982 unsigned long offset =
4983 (bit (this_instr, 25)
4984 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
4985 : bits (this_instr, 0, 11));
4986
4987 if (bit (this_instr, 23))
4988 base += offset;
4989 else
4990 base -= offset;
4991 }
4992 nextpc =
4993 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4994 4, byte_order);
4995 }
4996 }
4997 break;
4998
4999 case 0x8:
5000 case 0x9: /* block transfer */
5001 if (bit (this_instr, 20))
5002 {
5003 /* LDM */
5004 if (bit (this_instr, 15))
5005 {
5006 /* loading pc */
5007 int offset = 0;
5008 unsigned long rn_val
5009 = get_frame_register_unsigned (frame,
5010 bits (this_instr, 16, 19));
5011
5012 if (bit (this_instr, 23))
5013 {
5014 /* up */
5015 unsigned long reglist = bits (this_instr, 0, 14);
5016 offset = bitcount (reglist) * 4;
5017 if (bit (this_instr, 24)) /* pre */
5018 offset += 4;
5019 }
5020 else if (bit (this_instr, 24))
5021 offset = -4;
5022
5023 nextpc =
5024 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
5025 (rn_val + offset),
5026 4, byte_order);
5027 }
5028 }
5029 break;
5030
5031 case 0xb: /* branch & link */
5032 case 0xa: /* branch */
5033 {
5034 nextpc = BranchDest (pc, this_instr);
5035 break;
5036 }
5037
5038 case 0xc:
5039 case 0xd:
5040 case 0xe: /* coproc ops */
5041 break;
5042 case 0xf: /* SWI */
5043 {
5044 struct gdbarch_tdep *tdep;
5045 tdep = gdbarch_tdep (gdbarch);
5046
5047 if (tdep->syscall_next_pc != NULL)
5048 nextpc = tdep->syscall_next_pc (frame);
5049
5050 }
5051 break;
5052
5053 default:
5054 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
5055 return (pc);
5056 }
5057 }
5058
5059 return nextpc;
5060 }
5061
5062 /* Determine next PC after current instruction executes. Will call either
5063 arm_get_next_pc_raw or thumb_get_next_pc_raw, depending on whether
5064 FRAME is executing in Thumb mode. */
5065
5066 CORE_ADDR
5067 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5068 {
5069 CORE_ADDR nextpc;
5070
5071 if (arm_frame_is_thumb (frame))
5072 nextpc = thumb_get_next_pc_raw (frame, pc);
5073 else
5074 nextpc = arm_get_next_pc_raw (frame, pc);
5075
5076 return nextpc;
5077 }
5078
5079 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5080 of the appropriate mode (as encoded in the PC value), even if this
5081 differs from what would be expected according to the symbol tables. */
5082
5083 void
5084 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5085 struct address_space *aspace,
5086 CORE_ADDR pc)
5087 {
5088 struct cleanup *old_chain
5089 = make_cleanup_restore_integer (&arm_override_mode);
5090
5091 arm_override_mode = IS_THUMB_ADDR (pc);
5092 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5093
5094 insert_single_step_breakpoint (gdbarch, aspace, pc);
5095
5096 do_cleanups (old_chain);
5097 }
5098
5099 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5100 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5101 is found, attempt to step through it. A breakpoint is placed at the end of
5102 the sequence. */
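/* A typical sequence, e.g. for an atomic increment, looks something
   like:

   1: ldrex r1, [r0]
      add r1, r1, #1
      strex r2, r1, [r0]
      cmp r2, #0
      bne 1b

   Trapping inside the ldrex/strex window would clear the exclusive
   monitor and the store might then never succeed, so the breakpoint is
   placed just past the final strex instead. */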
5103
5104 static int
5105 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
5106 {
5107 struct gdbarch *gdbarch = get_frame_arch (frame);
5108 struct address_space *aspace = get_frame_address_space (frame);
5109 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5110 CORE_ADDR pc = get_frame_pc (frame);
5111 CORE_ADDR breaks[2] = {-1, -1};
5112 CORE_ADDR loc = pc;
5113 unsigned short insn1, insn2;
5114 int insn_count;
5115 int index;
5116 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5117 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5118 ULONGEST status, itstate;
5119
5120 /* We currently do not support atomic sequences within an IT block. */
5121 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
5122 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
5123 if (itstate & 0x0f)
5124 return 0;
5125
5126 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5127 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5128 loc += 2;
5129 if (thumb_insn_size (insn1) != 4)
5130 return 0;
5131
5132 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5133 loc += 2;
5134 if (!((insn1 & 0xfff0) == 0xe850
5135 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
5136 return 0;
5137
5138 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5139 instructions. */
5140 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5141 {
5142 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5143 loc += 2;
5144
5145 if (thumb_insn_size (insn1) != 4)
5146 {
5147 /* Assume that there is at most one conditional branch in the
5148 atomic sequence. If a conditional branch is found, put a
5149 breakpoint at its destination address. */
5150 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
5151 {
5152 if (last_breakpoint > 0)
5153 return 0; /* More than one conditional branch found,
5154 fall back to the standard code. */
5155
5156 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
5157 last_breakpoint++;
5158 }
5159
5160 /* We do not support atomic sequences that use any *other*
5161 instructions but conditional branches to change the PC.
5162 Fall back to standard code to avoid losing control of
5163 execution. */
5164 else if (thumb_instruction_changes_pc (insn1))
5165 return 0;
5166 }
5167 else
5168 {
5169 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5170 loc += 2;
5171
5172 /* Assume that there is at most one conditional branch in the
5173 atomic sequence. If a conditional branch is found, put a
5174 breakpoint at its destination address. */
5175 if ((insn1 & 0xf800) == 0xf000
5176 && (insn2 & 0xd000) == 0x8000
5177 && (insn1 & 0x0380) != 0x0380)
5178 {
5179 int sign, j1, j2, imm1, imm2;
5180 unsigned int offset;
5181
5182 sign = sbits (insn1, 10, 10);
5183 imm1 = bits (insn1, 0, 5);
5184 imm2 = bits (insn2, 0, 10);
5185 j1 = bit (insn2, 13);
5186 j2 = bit (insn2, 11);
5187
5188 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
5189 offset += (imm1 << 12) + (imm2 << 1);
5190
5191 if (last_breakpoint > 0)
5192 return 0; /* More than one conditional branch found,
5193 fall back to the standard code. */
5194
5195 breaks[1] = loc + offset;
5196 last_breakpoint++;
5197 }
5198
5199 /* We do not support atomic sequences that use any *other*
5200 instructions but conditional branches to change the PC.
5201 Fall back to standard code to avoid losing control of
5202 execution. */
5203 else if (thumb2_instruction_changes_pc (insn1, insn2))
5204 return 0;
5205
5206 /* If we find a strex{,b,h,d}, we're done. */
5207 if ((insn1 & 0xfff0) == 0xe840
5208 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
5209 break;
5210 }
5211 }
5212
5213 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5214 if (insn_count == atomic_sequence_length)
5215 return 0;
5216
5217 /* Insert a breakpoint right after the end of the atomic sequence. */
5218 breaks[0] = loc;
5219
5220 /* Check for duplicated breakpoints. Check also for a breakpoint
5221 placed (branch instruction's destination) anywhere in sequence. */
5222 if (last_breakpoint
5223 && (breaks[1] == breaks[0]
5224 || (breaks[1] >= pc && breaks[1] < loc)))
5225 last_breakpoint = 0;
5226
5227 /* Effectively inserts the breakpoints. */
5228 for (index = 0; index <= last_breakpoint; index++)
5229 arm_insert_single_step_breakpoint (gdbarch, aspace,
5230 MAKE_THUMB_ADDR (breaks[index]));
5231
5232 return 1;
5233 }
5234
5235 static int
5236 arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5237 {
5238 struct gdbarch *gdbarch = get_frame_arch (frame);
5239 struct address_space *aspace = get_frame_address_space (frame);
5240 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5241 CORE_ADDR pc = get_frame_pc (frame);
5242 CORE_ADDR breaks[2] = {-1, -1};
5243 CORE_ADDR loc = pc;
5244 unsigned int insn;
5245 int insn_count;
5246 int index;
5247 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5248 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5249
5250 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5251 Note that we do not currently support conditionally executed atomic
5252 instructions. */
5253 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5254 loc += 4;
5255 if ((insn & 0xff9000f0) != 0xe1900090)
5256 return 0;
5257
5258 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5259 instructions. */
5260 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5261 {
5262 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5263 loc += 4;
5264
5265 /* Assume that there is at most one conditional branch in the atomic
5266 sequence. If a conditional branch is found, put a breakpoint at
5267 its destination address. */
5268 if (bits (insn, 24, 27) == 0xa)
5269 {
5270 if (last_breakpoint > 0)
5271 return 0; /* More than one conditional branch found, fall back
5272 to the standard single-step code. */
5273
5274 breaks[1] = BranchDest (loc - 4, insn);
5275 last_breakpoint++;
5276 }
5277
5278 /* We do not support atomic sequences that use any *other* instructions
5279 but conditional branches to change the PC. Fall back to standard
5280 code to avoid losing control of execution. */
5281 else if (arm_instruction_changes_pc (insn))
5282 return 0;
5283
5284 /* If we find a strex{,b,h,d}, we're done. */
5285 if ((insn & 0xff9000f0) == 0xe1800090)
5286 break;
5287 }
5288
5289 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5290 if (insn_count == atomic_sequence_length)
5291 return 0;
5292
5293 /* Insert a breakpoint right after the end of the atomic sequence. */
5294 breaks[0] = loc;
5295
5296 /* Check for duplicated breakpoints. Check also for a breakpoint
5297 placed (branch instruction's destination) anywhere in sequence. */
5298 if (last_breakpoint
5299 && (breaks[1] == breaks[0]
5300 || (breaks[1] >= pc && breaks[1] < loc)))
5301 last_breakpoint = 0;
5302
5303 /* Effectively inserts the breakpoints. */
5304 for (index = 0; index <= last_breakpoint; index++)
5305 arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
5306
5307 return 1;
5308 }
5309
5310 int
5311 arm_deal_with_atomic_sequence (struct frame_info *frame)
5312 {
5313 if (arm_frame_is_thumb (frame))
5314 return thumb_deal_with_atomic_sequence_raw (frame);
5315 else
5316 return arm_deal_with_atomic_sequence_raw (frame);
5317 }
5318
5319 /* single_step() is called just before we want to resume the inferior,
5320 if we want to single-step it but there is no hardware or kernel
5321 single-step support. We find the target of the coming instruction
5322 and breakpoint it. */
5323
5324 int
5325 arm_software_single_step (struct frame_info *frame)
5326 {
5327 struct gdbarch *gdbarch = get_frame_arch (frame);
5328 struct address_space *aspace = get_frame_address_space (frame);
5329 CORE_ADDR next_pc;
5330
5331 if (arm_deal_with_atomic_sequence (frame))
5332 return 1;
5333
5334 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5335 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5336
5337 return 1;
5338 }
5339
5340 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5341 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5342 NULL if an error occurs. BUF is freed. */
5343
5344 static gdb_byte *
5345 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5346 int old_len, int new_len)
5347 {
5348 gdb_byte *new_buf;
5349 int bytes_to_read = new_len - old_len;
5350
5351 new_buf = xmalloc (new_len);
5352 memcpy (new_buf + bytes_to_read, buf, old_len);
5353 xfree (buf);
5354 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5355 {
5356 xfree (new_buf);
5357 return NULL;
5358 }
5359 return new_buf;
5360 }
5361
5362 /* An IT block is at most the 2-byte IT instruction followed by
5363 four 4-byte instructions. The furthest back we must search to
5364 find an IT block that affects the current instruction is thus
5365 2 + 3 * 4 == 14 bytes. */
5366 #define MAX_IT_BLOCK_PREFIX 14
5367
5368 /* Use a quick scan if there are more than this many bytes of
5369 code. */
5370 #define IT_SCAN_THRESHOLD 32
5371
5372 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5373 A breakpoint in an IT block may not be hit, depending on the
5374 condition flags. */
5375 static CORE_ADDR
5376 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5377 {
5378 gdb_byte *buf;
5379 char map_type;
5380 CORE_ADDR boundary, func_start;
5381 int buf_len;
5382 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5383 int i, any, last_it, last_it_count;
5384
5385 /* If we are using BKPT breakpoints, none of this is necessary. */
5386 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5387 return bpaddr;
5388
5389 /* ARM mode does not have this problem. */
5390 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5391 return bpaddr;
5392
5393 /* We are setting a breakpoint in Thumb code that could potentially
5394 contain an IT block. The first step is to find how much Thumb
5395 code there is; we do not need to read outside of known Thumb
5396 sequences. */
5397 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5398 if (map_type == 0)
5399 /* Thumb-2 code must have mapping symbols to have a chance. */
5400 return bpaddr;
5401
5402 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5403
5404 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5405 && func_start > boundary)
5406 boundary = func_start;
5407
5408 /* Search for a candidate IT instruction. We have to do some fancy
5409 footwork to distinguish a real IT instruction from the second
5410 half of a 32-bit instruction, but there is no need for that if
5411 there's no candidate. */
5412 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5413 if (buf_len == 0)
5414 /* No room for an IT instruction. */
5415 return bpaddr;
5416
5417 buf = xmalloc (buf_len);
5418 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5419 return bpaddr;
5420 any = 0;
5421 for (i = 0; i < buf_len; i += 2)
5422 {
5423 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5424 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5425 {
5426 any = 1;
5427 break;
5428 }
5429 }
5430 if (any == 0)
5431 {
5432 xfree (buf);
5433 return bpaddr;
5434 }
5435
5436 /* OK, the code bytes before this instruction contain at least one
5437 halfword which resembles an IT instruction. We know that it's
5438 Thumb code, but there are still two possibilities. Either the
5439 halfword really is an IT instruction, or it is the second half of
5440 a 32-bit Thumb instruction. The only way we can tell is to
5441 scan forwards from a known instruction boundary. */
5442 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5443 {
5444 int definite;
5445
5446 /* There's a lot of code before this instruction. Start with an
5447 optimistic search; it's easy to recognize halfwords that can
5448 not be the start of a 32-bit instruction, and use that to
5449 lock on to the instruction boundaries. */
5450 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5451 if (buf == NULL)
5452 return bpaddr;
5453 buf_len = IT_SCAN_THRESHOLD;
5454
5455 definite = 0;
5456 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5457 {
5458 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5459 if (thumb_insn_size (inst1) == 2)
5460 {
5461 definite = 1;
5462 break;
5463 }
5464 }
5465
5466 /* At this point, if DEFINITE, BUF[I] is the first place we
5467 are sure that we know the instruction boundaries, and it is far
5468 enough from BPADDR that we could not miss an IT instruction
5469 affecting BPADDR. If ! DEFINITE, give up - start from a
5470 known boundary. */
5471 if (! definite)
5472 {
5473 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5474 bpaddr - boundary);
5475 if (buf == NULL)
5476 return bpaddr;
5477 buf_len = bpaddr - boundary;
5478 i = 0;
5479 }
5480 }
5481 else
5482 {
5483 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5484 if (buf == NULL)
5485 return bpaddr;
5486 buf_len = bpaddr - boundary;
5487 i = 0;
5488 }
5489
5490 /* Scan forwards. Find the last IT instruction before BPADDR. */
5491 last_it = -1;
5492 last_it_count = 0;
5493 while (i < buf_len)
5494 {
5495 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5496 last_it_count--;
5497 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5498 {
5499 last_it = i;
5500 if (inst1 & 0x0001)
5501 last_it_count = 4;
5502 else if (inst1 & 0x0002)
5503 last_it_count = 3;
5504 else if (inst1 & 0x0004)
5505 last_it_count = 2;
5506 else
5507 last_it_count = 1;
5508 }
5509 i += thumb_insn_size (inst1);
5510 }
5511
5512 xfree (buf);
5513
5514 if (last_it == -1)
5515 /* There wasn't really an IT instruction after all. */
5516 return bpaddr;
5517
5518 if (last_it_count < 1)
5519 /* It was too far away. */
5520 return bpaddr;
5521
5522 /* This really is a trouble spot. Move the breakpoint to the IT
5523 instruction. */
5524 return bpaddr - buf_len + last_it;
5525 }
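
/* A minimal sketch of the IT test used above (hypothetical helper,
   illustration only; not called anywhere in this file).  An IT
   instruction is 0xbfXY with a non-zero mask Y, and the position of
   the lowest set mask bit gives the number of following instructions
   the block covers.  */

static int
example_it_block_length (unsigned short inst1)
{
  if ((inst1 & 0xff00) != 0xbf00 || (inst1 & 0x000f) == 0)
    return 0;			/* Not an IT instruction.  */
  if (inst1 & 0x0001)
    return 4;
  if (inst1 & 0x0002)
    return 3;
  if (inst1 & 0x0004)
    return 2;
  return 1;
}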
5526
5527 /* ARM displaced stepping support.
5528
5529 Generally ARM displaced stepping works as follows:
5530
5531 1. When an instruction is to be single-stepped, it is first decoded by
5532 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5533 Depending on the type of instruction, it is then copied to a scratch
5534 location, possibly in a modified form. The copy_* set of functions
5535 performs such modification, as necessary. A breakpoint is placed after
5536 the modified instruction in the scratch space to return control to GDB.
5537 Note in particular that instructions which modify the PC will no longer
5538 do so after modification.
5539
5540 2. The instruction is single-stepped, by setting the PC to the scratch
5541 location address, and resuming. Control returns to GDB when the
5542 breakpoint is hit.
5543
5544 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5545 function used for the current instruction. This function's job is to
5546 put the CPU/memory state back to what it would have been if the
5547 instruction had been executed unmodified in its original location. */
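
/* A rough sketch of that sequence (hypothetical pseudo-code only; the
   actual driver lives in GDB's displaced-stepping core, not in this
   file):

     closure = copy_insn (gdbarch, from, scratch, regs);   // step 1
     write_pc (regs, scratch);
     resume_and_wait_for_breakpoint ();                    // step 2
     closure->cleanup (gdbarch, regs, closure);            // step 3
     if (!closure->wrote_to_pc)
       write_pc (regs, from + insn_size);                  // fall through
*/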
5548
5549 /* NOP instruction (mov r0, r0). */
5550 #define ARM_NOP 0xe1a00000
5551 #define THUMB_NOP 0x4600
5552
5553 /* Helper for register reads for displaced stepping. In particular, this
5554 returns the PC as it would be seen by the instruction at its original
5555 location. */
5556
5557 ULONGEST
5558 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5559 int regno)
5560 {
5561 ULONGEST ret;
5562 CORE_ADDR from = dsc->insn_addr;
5563
5564 if (regno == ARM_PC_REGNUM)
5565 {
5566 /* Compute pipeline offset:
5567 - When executing an ARM instruction, PC reads as the address of the
5568 current instruction plus 8.
5569 - When executing a Thumb instruction, PC reads as the address of the
5570 current instruction plus 4. */
5571
5572 if (!dsc->is_thumb)
5573 from += 8;
5574 else
5575 from += 4;
5576
5577 if (debug_displaced)
5578 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5579 (unsigned long) from);
5580 return (ULONGEST) from;
5581 }
5582 else
5583 {
5584 regcache_cooked_read_unsigned (regs, regno, &ret);
5585 if (debug_displaced)
5586 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5587 regno, (unsigned long) ret);
5588 return ret;
5589 }
5590 }
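
/* For example (illustrative values only): an ARM-mode instruction at
   0x8000 that reads r15 observes 0x8008, while a Thumb-mode
   instruction at the same address observes 0x8004.  A minimal sketch
   of that rule, outside of any regcache (hypothetical helper, never
   called):  */

static ULONGEST
example_pipelined_pc (CORE_ADDR insn_addr, int is_thumb)
{
  return insn_addr + (is_thumb ? 4 : 8);
}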
5591
5592 static int
5593 displaced_in_arm_mode (struct regcache *regs)
5594 {
5595 ULONGEST ps;
5596 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5597
5598 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5599
5600 return (ps & t_bit) == 0;
5601 }
5602
5603 /* Write to the PC as if from a branch instruction. */
5604
5605 static void
5606 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5607 ULONGEST val)
5608 {
5609 if (!dsc->is_thumb)
5610 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5611 architecture versions < 6. */
5612 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5613 val & ~(ULONGEST) 0x3);
5614 else
5615 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5616 val & ~(ULONGEST) 0x1);
5617 }
5618
5619 /* Write to the PC as if from a branch-exchange instruction. */
5620
5621 static void
5622 bx_write_pc (struct regcache *regs, ULONGEST val)
5623 {
5624 ULONGEST ps;
5625 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5626
5627 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5628
5629 if ((val & 1) == 1)
5630 {
5631 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5632 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5633 }
5634 else if ((val & 2) == 0)
5635 {
5636 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5637 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5638 }
5639 else
5640 {
5641 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5642 mode, align dest to 4 bytes). */
5643 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5644 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5645 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5646 }
5647 }
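
/* Illustration (hypothetical helper, never called): the destination
   actually written by the code above, ignoring the warning for the
   unpredictable bits 1:0 == 10 case.  Bit 0 set selects Thumb state;
   otherwise the address is word-aligned and ARM state is selected.  */

static ULONGEST
example_bx_destination (ULONGEST val, int *is_thumb)
{
  if (val & 1)
    {
      *is_thumb = 1;
      return val & 0xfffffffe;
    }

  *is_thumb = 0;
  return val & 0xfffffffc;
}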
5648
5649 /* Write to the PC as if from a load instruction. */
5650
5651 static void
5652 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5653 ULONGEST val)
5654 {
5655 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5656 bx_write_pc (regs, val);
5657 else
5658 branch_write_pc (regs, dsc, val);
5659 }
5660
5661 /* Write to the PC as if from an ALU instruction. */
5662
5663 static void
5664 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5665 ULONGEST val)
5666 {
5667 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5668 bx_write_pc (regs, val);
5669 else
5670 branch_write_pc (regs, dsc, val);
5671 }
5672
5673 /* Helper for writing to registers for displaced stepping. Writing to the PC
5674 has varying effects depending on the instruction which does the write:
5675 this is controlled by the WRITE_PC argument. */
5676
5677 void
5678 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5679 int regno, ULONGEST val, enum pc_write_style write_pc)
5680 {
5681 if (regno == ARM_PC_REGNUM)
5682 {
5683 if (debug_displaced)
5684 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5685 (unsigned long) val);
5686 switch (write_pc)
5687 {
5688 case BRANCH_WRITE_PC:
5689 branch_write_pc (regs, dsc, val);
5690 break;
5691
5692 case BX_WRITE_PC:
5693 bx_write_pc (regs, val);
5694 break;
5695
5696 case LOAD_WRITE_PC:
5697 load_write_pc (regs, dsc, val);
5698 break;
5699
5700 case ALU_WRITE_PC:
5701 alu_write_pc (regs, dsc, val);
5702 break;
5703
5704 case CANNOT_WRITE_PC:
5705 warning (_("Instruction wrote to PC in an unexpected way when "
5706 "single-stepping"));
5707 break;
5708
5709 default:
5710 internal_error (__FILE__, __LINE__,
5711 _("Invalid argument to displaced_write_reg"));
5712 }
5713
5714 dsc->wrote_to_pc = 1;
5715 }
5716 else
5717 {
5718 if (debug_displaced)
5719 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5720 regno, (unsigned long) val);
5721 regcache_cooked_write_unsigned (regs, regno, val);
5722 }
5723 }
5724
5725 /* This function is used to concisely determine if an instruction INSN
5726 references PC. Register fields of interest in INSN should have the
5727 corresponding fields of BITMASK set to 0b1111. The function
5728 returns return 1 if any of these fields in INSN reference the PC
5729 (also 0b1111, r15), else it returns 0. */
5730
5731 static int
5732 insn_references_pc (uint32_t insn, uint32_t bitmask)
5733 {
5734 uint32_t lowbit = 1;
5735
5736 while (bitmask != 0)
5737 {
5738 uint32_t mask;
5739
5740 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5741 ;
5742
5743 if (!lowbit)
5744 break;
5745
5746 mask = lowbit * 0xf;
5747
5748 if ((insn & mask) == mask)
5749 return 1;
5750
5751 bitmask &= ~mask;
5752 }
5753
5754 return 0;
5755 }
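
/* Usage sketch (illustrative only; this hypothetical function is not
   called anywhere).  0xe51ff004 is "ldr pc, [pc, #-4]", whose Rn
   (bits 16-19) and Rt (bits 12-15) fields are both 0b1111, while its
   low four bits hold an immediate rather than a register.  */

static void
example_insn_references_pc (void)
{
  gdb_assert (insn_references_pc (0xe51ff004, 0x000f0000ul) == 1); /* Rn.  */
  gdb_assert (insn_references_pc (0xe51ff004, 0x0000f000ul) == 1); /* Rt.  */
  gdb_assert (insn_references_pc (0xe51ff004, 0x0000000ful) == 0); /* imm.  */
}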
5756
5757 /* The simplest copy function. Many instructions have the same effect no
5758 matter what address they are executed at: in those cases, use this. */
5759
5760 static int
5761 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5762 const char *iname, struct displaced_step_closure *dsc)
5763 {
5764 if (debug_displaced)
5765 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5766 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5767 iname);
5768
5769 dsc->modinsn[0] = insn;
5770
5771 return 0;
5772 }
5773
5774 static int
5775 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5776 uint16_t insn2, const char *iname,
5777 struct displaced_step_closure *dsc)
5778 {
5779 if (debug_displaced)
5780 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5781 "opcode/class '%s' unmodified\n", insn1, insn2,
5782 iname);
5783
5784 dsc->modinsn[0] = insn1;
5785 dsc->modinsn[1] = insn2;
5786 dsc->numinsns = 2;
5787
5788 return 0;
5789 }
5790
5791 /* Copy a 16-bit Thumb (Thumb or 16-bit Thumb-2) instruction without any
5792 modification. */
5793 static int
5794 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5795 const char *iname,
5796 struct displaced_step_closure *dsc)
5797 {
5798 if (debug_displaced)
5799 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5800 "opcode/class '%s' unmodified\n", insn,
5801 iname);
5802
5803 dsc->modinsn[0] = insn;
5804
5805 return 0;
5806 }
5807
5808 /* Preload instructions with immediate offset. */
5809
5810 static void
5811 cleanup_preload (struct gdbarch *gdbarch,
5812 struct regcache *regs, struct displaced_step_closure *dsc)
5813 {
5814 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5815 if (!dsc->u.preload.immed)
5816 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5817 }
5818
5819 static void
5820 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5821 struct displaced_step_closure *dsc, unsigned int rn)
5822 {
5823 ULONGEST rn_val;
5824 /* Preload instructions:
5825
5826 {pli/pld} [rn, #+/-imm]
5827 ->
5828 {pli/pld} [r0, #+/-imm]. */
5829
5830 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5831 rn_val = displaced_read_reg (regs, dsc, rn);
5832 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5833 dsc->u.preload.immed = 1;
5834
5835 dsc->cleanup = &cleanup_preload;
5836 }
5837
5838 static int
5839 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5840 struct displaced_step_closure *dsc)
5841 {
5842 unsigned int rn = bits (insn, 16, 19);
5843
5844 if (!insn_references_pc (insn, 0x000f0000ul))
5845 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5846
5847 if (debug_displaced)
5848 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5849 (unsigned long) insn);
5850
5851 dsc->modinsn[0] = insn & 0xfff0ffff;
5852
5853 install_preload (gdbarch, regs, dsc, rn);
5854
5855 return 0;
5856 }
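
/* Worked example (illustrative encoding; hypothetical helper, never
   called): "pld [pc, #16]" is 0xf5dff010.  Clearing the Rn field
   gives "pld [r0, #16]", and install_preload arranges for r0 to hold
   the value the PC would have had at the original location.  */

static uint32_t
example_preload_rewrite (void)
{
  uint32_t insn = 0xf5dff010;		/* pld [pc, #16].  */

  return insn & 0xfff0ffff;		/* 0xf5d0f010: pld [r0, #16].  */
}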
5857
5858 static int
5859 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5860 struct regcache *regs, struct displaced_step_closure *dsc)
5861 {
5862 unsigned int rn = bits (insn1, 0, 3);
5863 unsigned int u_bit = bit (insn1, 7);
5864 int imm12 = bits (insn2, 0, 11);
5865 ULONGEST pc_val;
5866
5867 if (rn != ARM_PC_REGNUM)
5868 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5869
5870 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3 and
5871 PLD (literal) Encoding T1. */
5872 if (debug_displaced)
5873 fprintf_unfiltered (gdb_stdlog,
5874 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5875 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5876 imm12);
5877
5878 if (!u_bit)
5879 imm12 = -1 * imm12;
5880
5881 /* Rewrite instruction {pli/pld} PC imm12 into:
5882 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5883
5884 {pli/pld} [r0, r1]
5885
5886 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5887
5888 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5889 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5890
5891 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5892
5893 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5894 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5895 dsc->u.preload.immed = 0;
5896
5897 /* {pli/pld} [r0, r1] */
5898 dsc->modinsn[0] = insn1 & 0xfff0;
5899 dsc->modinsn[1] = 0xf001;
5900 dsc->numinsns = 2;
5901
5902 dsc->cleanup = &cleanup_preload;
5903 return 0;
5904 }
5905
5906 /* Preload instructions with register offset. */
5907
5908 static void
5909 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5910 struct displaced_step_closure *dsc, unsigned int rn,
5911 unsigned int rm)
5912 {
5913 ULONGEST rn_val, rm_val;
5914
5915 /* Preload register-offset instructions:
5916
5917 {pli/pld} [rn, rm {, shift}]
5918 ->
5919 {pli/pld} [r0, r1 {, shift}]. */
5920
5921 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5922 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5923 rn_val = displaced_read_reg (regs, dsc, rn);
5924 rm_val = displaced_read_reg (regs, dsc, rm);
5925 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5926 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5927 dsc->u.preload.immed = 0;
5928
5929 dsc->cleanup = &cleanup_preload;
5930 }
5931
5932 static int
5933 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5934 struct regcache *regs,
5935 struct displaced_step_closure *dsc)
5936 {
5937 unsigned int rn = bits (insn, 16, 19);
5938 unsigned int rm = bits (insn, 0, 3);
5939
5940
5941 if (!insn_references_pc (insn, 0x000f000ful))
5942 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5943
5944 if (debug_displaced)
5945 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5946 (unsigned long) insn);
5947
5948 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5949
5950 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5951 return 0;
5952 }
5953
5954 /* Copy/cleanup coprocessor load and store instructions. */
5955
5956 static void
5957 cleanup_copro_load_store (struct gdbarch *gdbarch,
5958 struct regcache *regs,
5959 struct displaced_step_closure *dsc)
5960 {
5961 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5962
5963 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5964
5965 if (dsc->u.ldst.writeback)
5966 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5967 }
5968
5969 static void
5970 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5971 struct displaced_step_closure *dsc,
5972 int writeback, unsigned int rn)
5973 {
5974 ULONGEST rn_val;
5975
5976 /* Coprocessor load/store instructions:
5977
5978 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5979 ->
5980 {stc/stc2} [r0, #+/-imm].
5981
5982 ldc/ldc2 are handled identically. */
5983
5984 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5985 rn_val = displaced_read_reg (regs, dsc, rn);
5986 /* PC should be 4-byte aligned. */
5987 rn_val = rn_val & 0xfffffffc;
5988 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5989
5990 dsc->u.ldst.writeback = writeback;
5991 dsc->u.ldst.rn = rn;
5992
5993 dsc->cleanup = &cleanup_copro_load_store;
5994 }
5995
5996 static int
5997 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5998 struct regcache *regs,
5999 struct displaced_step_closure *dsc)
6000 {
6001 unsigned int rn = bits (insn, 16, 19);
6002
6003 if (!insn_references_pc (insn, 0x000f0000ul))
6004 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
6005
6006 if (debug_displaced)
6007 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6008 "load/store insn %.8lx\n", (unsigned long) insn);
6009
6010 dsc->modinsn[0] = insn & 0xfff0ffff;
6011
6012 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
6013
6014 return 0;
6015 }
6016
6017 static int
6018 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
6019 uint16_t insn2, struct regcache *regs,
6020 struct displaced_step_closure *dsc)
6021 {
6022 unsigned int rn = bits (insn1, 0, 3);
6023
6024 if (rn != ARM_PC_REGNUM)
6025 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6026 "copro load/store", dsc);
6027
6028 if (debug_displaced)
6029 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6030 "load/store insn %.4x%.4x\n", insn1, insn2);
6031
6032 dsc->modinsn[0] = insn1 & 0xfff0;
6033 dsc->modinsn[1] = insn2;
6034 dsc->numinsns = 2;
6035
6036 /* This function is called to copy the LDC/LDC2/VLDR PC-relative forms,
6037 which don't support writeback, so pass 0. */
6038 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6039
6040 return 0;
6041 }
6042
6043 /* Clean up branch instructions (actually perform the branch, by setting
6044 PC). */
6045
6046 static void
6047 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
6048 struct displaced_step_closure *dsc)
6049 {
6050 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6051 int branch_taken = condition_true (dsc->u.branch.cond, status);
6052 enum pc_write_style write_pc = dsc->u.branch.exchange
6053 ? BX_WRITE_PC : BRANCH_WRITE_PC;
6054
6055 if (!branch_taken)
6056 return;
6057
6058 if (dsc->u.branch.link)
6059 {
6060 /* LR should hold the address of the insn following the current one. So
6061 as not to confuse logic handling a later `bx lr', if the current insn
6062 is Thumb, bit 0 of the LR value should be set to 1. */
6063 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
6064
6065 if (dsc->is_thumb)
6066 next_insn_addr |= 0x1;
6067
6068 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
6069 CANNOT_WRITE_PC);
6070 }
6071
6072 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
6073 }
6074
6075 /* Copy B/BL/BLX instructions with immediate destinations. */
6076
6077 static void
6078 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6079 struct displaced_step_closure *dsc,
6080 unsigned int cond, int exchange, int link, long offset)
6081 {
6082 /* Implement "BL<cond> <label>" as:
6083
6084 Preparation: cond <- instruction condition
6085 Insn: mov r0, r0 (nop)
6086 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6087
6088 B<cond> similar, but don't set r14 in cleanup. */
6089
6090 dsc->u.branch.cond = cond;
6091 dsc->u.branch.link = link;
6092 dsc->u.branch.exchange = exchange;
6093
6094 dsc->u.branch.dest = dsc->insn_addr;
6095 if (link && exchange)
6096 /* For BLX, the offset is computed from Align (PC, 4). */
6097 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6098
6099 if (dsc->is_thumb)
6100 dsc->u.branch.dest += 4 + offset;
6101 else
6102 dsc->u.branch.dest += 8 + offset;
6103
6104 dsc->cleanup = &cleanup_branch;
6105 }
6106 static int
6107 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6108 struct regcache *regs, struct displaced_step_closure *dsc)
6109 {
6110 unsigned int cond = bits (insn, 28, 31);
6111 int exchange = (cond == 0xf);
6112 int link = exchange || bit (insn, 24);
6113 long offset;
6114
6115 if (debug_displaced)
6116 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6117 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6118 (unsigned long) insn);
6119 if (exchange)
6120 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6121 then arrange the switch into Thumb mode. */
6122 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6123 else
6124 offset = bits (insn, 0, 23) << 2;
6125
6126 if (bit (offset, 25))
6127 offset = offset | ~0x3ffffff;
6128
6129 dsc->modinsn[0] = ARM_NOP;
6130
6131 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6132 return 0;
6133 }
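
/* Worked example (illustrative encoding; hypothetical helper, never
   called): 0xeafffffe is "b ." (branch to self).  Its 24-bit field is
   0xfffffe; shifted left by two and sign-extended it becomes -8, and
   the destination is insn_addr + 8 + (-8) = insn_addr, as expected.  */

static long
example_arm_b_offset (void)
{
  uint32_t insn = 0xeafffffe;		/* b .  */
  long offset = bits (insn, 0, 23) << 2;	/* 0x03fffff8.  */

  if (bit (offset, 25))
    offset = offset | ~0x3ffffff;	/* Sign-extend to -8.  */

  return offset;
}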
6134
6135 static int
6136 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6137 uint16_t insn2, struct regcache *regs,
6138 struct displaced_step_closure *dsc)
6139 {
6140 int link = bit (insn2, 14);
6141 int exchange = link && !bit (insn2, 12);
6142 int cond = INST_AL;
6143 long offset = 0;
6144 int j1 = bit (insn2, 13);
6145 int j2 = bit (insn2, 11);
6146 int s = sbits (insn1, 10, 10);
6147 int i1 = !(j1 ^ bit (insn1, 10));
6148 int i2 = !(j2 ^ bit (insn1, 10));
6149
6150 if (!link && !exchange) /* B */
6151 {
6152 offset = (bits (insn2, 0, 10) << 1);
6153 if (bit (insn2, 12)) /* Encoding T4 */
6154 {
6155 offset |= (bits (insn1, 0, 9) << 12)
6156 | (i2 << 22)
6157 | (i1 << 23)
6158 | (s << 24);
6159 cond = INST_AL;
6160 }
6161 else /* Encoding T3 */
6162 {
6163 offset |= (bits (insn1, 0, 5) << 12)
6164 | (j1 << 18)
6165 | (j2 << 19)
6166 | (s << 20);
6167 cond = bits (insn1, 6, 9);
6168 }
6169 }
6170 else
6171 {
6172 offset = (bits (insn1, 0, 9) << 12);
6173 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6174 offset |= exchange ?
6175 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6176 }
6177
6178 if (debug_displaced)
6179 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
6180 "%.4x %.4x with offset %.8lx\n",
6181 link ? (exchange) ? "blx" : "bl" : "b",
6182 insn1, insn2, offset);
6183
6184 dsc->modinsn[0] = THUMB_NOP;
6185
6186 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6187 return 0;
6188 }
6189
6190 /* Copy B Thumb instructions. */
6191 static int
6192 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
6193 struct displaced_step_closure *dsc)
6194 {
6195 unsigned int cond = 0;
6196 int offset = 0;
6197 unsigned short bit_12_15 = bits (insn, 12, 15);
6198 CORE_ADDR from = dsc->insn_addr;
6199
6200 if (bit_12_15 == 0xd)
6201 {
6202 /* offset = SignExtend (imm8:0, 32) */
6203 offset = sbits ((insn << 1), 0, 8);
6204 cond = bits (insn, 8, 11);
6205 }
6206 else if (bit_12_15 == 0xe) /* Encoding T2 */
6207 {
6208 offset = sbits ((insn << 1), 0, 11);
6209 cond = INST_AL;
6210 }
6211
6212 if (debug_displaced)
6213 fprintf_unfiltered (gdb_stdlog,
6214 "displaced: copying b immediate insn %.4x "
6215 "with offset %d\n", insn, offset);
6216
6217 dsc->u.branch.cond = cond;
6218 dsc->u.branch.link = 0;
6219 dsc->u.branch.exchange = 0;
6220 dsc->u.branch.dest = from + 4 + offset;
6221
6222 dsc->modinsn[0] = THUMB_NOP;
6223
6224 dsc->cleanup = &cleanup_branch;
6225
6226 return 0;
6227 }
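
/* Worked example (illustrative encoding; hypothetical helper, never
   called): 0xe7fe is the unconditional Thumb "b ." (branch to self,
   encoding T2).  Its 11-bit immediate, doubled and sign-extended,
   gives -4, so the destination is insn_addr + 4 + (-4) = insn_addr.  */

static int
example_thumb_b_offset (void)
{
  unsigned short insn = 0xe7fe;		/* b .  */

  return sbits ((insn << 1), 0, 11);	/* -4.  */
}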
6228
6229 /* Copy BX/BLX with register-specified destinations. */
6230
6231 static void
6232 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6233 struct displaced_step_closure *dsc, int link,
6234 unsigned int cond, unsigned int rm)
6235 {
6236 /* Implement "{BX,BLX}<cond> <reg>" as:
6237
6238 Preparation: cond <- instruction condition
6239 Insn: mov r0, r0 (nop)
6240 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6241
6242 Don't set r14 in cleanup for BX. */
6243
6244 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6245
6246 dsc->u.branch.cond = cond;
6247 dsc->u.branch.link = link;
6248
6249 dsc->u.branch.exchange = 1;
6250
6251 dsc->cleanup = &cleanup_branch;
6252 }
6253
6254 static int
6255 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6256 struct regcache *regs, struct displaced_step_closure *dsc)
6257 {
6258 unsigned int cond = bits (insn, 28, 31);
6259 /* BX: x12xxx1x
6260 BLX: x12xxx3x. */
6261 int link = bit (insn, 5);
6262 unsigned int rm = bits (insn, 0, 3);
6263
6264 if (debug_displaced)
6265 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6266 (unsigned long) insn);
6267
6268 dsc->modinsn[0] = ARM_NOP;
6269
6270 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6271 return 0;
6272 }
6273
6274 static int
6275 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6276 struct regcache *regs,
6277 struct displaced_step_closure *dsc)
6278 {
6279 int link = bit (insn, 7);
6280 unsigned int rm = bits (insn, 3, 6);
6281
6282 if (debug_displaced)
6283 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6284 (unsigned short) insn);
6285
6286 dsc->modinsn[0] = THUMB_NOP;
6287
6288 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6289
6290 return 0;
6291 }
6292
6293
6294 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6295
6296 static void
6297 cleanup_alu_imm (struct gdbarch *gdbarch,
6298 struct regcache *regs, struct displaced_step_closure *dsc)
6299 {
6300 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6301 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6302 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6303 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6304 }
6305
6306 static int
6307 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6308 struct displaced_step_closure *dsc)
6309 {
6310 unsigned int rn = bits (insn, 16, 19);
6311 unsigned int rd = bits (insn, 12, 15);
6312 unsigned int op = bits (insn, 21, 24);
6313 int is_mov = (op == 0xd);
6314 ULONGEST rd_val, rn_val;
6315
6316 if (!insn_references_pc (insn, 0x000ff000ul))
6317 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6318
6319 if (debug_displaced)
6320 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6321 "%.8lx\n", is_mov ? "move" : "ALU",
6322 (unsigned long) insn);
6323
6324 /* Instruction is of form:
6325
6326 <op><cond> rd, [rn,] #imm
6327
6328 Rewrite as:
6329
6330 Preparation: tmp1, tmp2 <- r0, r1;
6331 r0, r1 <- rd, rn
6332 Insn: <op><cond> r0, r1, #imm
6333 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6334 */
6335
6336 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6337 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6338 rn_val = displaced_read_reg (regs, dsc, rn);
6339 rd_val = displaced_read_reg (regs, dsc, rd);
6340 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6341 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6342 dsc->rd = rd;
6343
6344 if (is_mov)
6345 dsc->modinsn[0] = insn & 0xfff00fff;
6346 else
6347 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6348
6349 dsc->cleanup = &cleanup_alu_imm;
6350
6351 return 0;
6352 }
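
/* Worked example (illustrative encoding; hypothetical helper, never
   called): "add r2, pc, #4" is 0xe28f2004.  Clearing the Rd and Rn
   fields and selecting r0/r1 yields "add r0, r1, #4"; r1 is preloaded
   with the original PC value and the cleanup moves r0 back into r2.  */

static uint32_t
example_alu_imm_rewrite (void)
{
  uint32_t insn = 0xe28f2004;		/* add r2, pc, #4.  */

  return (insn & 0xfff00fff) | 0x10000;	/* 0xe2810004: add r0, r1, #4.  */
}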
6353
6354 static int
6355 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6356 uint16_t insn2, struct regcache *regs,
6357 struct displaced_step_closure *dsc)
6358 {
6359 unsigned int op = bits (insn1, 5, 8);
6360 unsigned int rn, rm, rd;
6361 ULONGEST rd_val, rn_val;
6362
6363 rn = bits (insn1, 0, 3); /* Rn */
6364 rm = bits (insn2, 0, 3); /* Rm */
6365 rd = bits (insn2, 8, 11); /* Rd */
6366
6367 /* This routine is only called for the MOV instruction. */
6368 gdb_assert (op == 0x2 && rn == 0xf);
6369
6370 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6371 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6372
6373 if (debug_displaced)
6374 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6375 "ALU", insn1, insn2);
6376
6377 /* Instruction is of form:
6378
6379 <op><cond> rd, [rn,] #imm
6380
6381 Rewrite as:
6382
6383 Preparation: tmp1, tmp2 <- r0, r1;
6384 r0, r1 <- rd, rn
6385 Insn: <op><cond> r0, r1, #imm
6386 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6387 */
6388
6389 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6390 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6391 rn_val = displaced_read_reg (regs, dsc, rn);
6392 rd_val = displaced_read_reg (regs, dsc, rd);
6393 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6394 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6395 dsc->rd = rd;
6396
6397 dsc->modinsn[0] = insn1;
6398 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6399 dsc->numinsns = 2;
6400
6401 dsc->cleanup = &cleanup_alu_imm;
6402
6403 return 0;
6404 }
6405
6406 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6407
6408 static void
6409 cleanup_alu_reg (struct gdbarch *gdbarch,
6410 struct regcache *regs, struct displaced_step_closure *dsc)
6411 {
6412 ULONGEST rd_val;
6413 int i;
6414
6415 rd_val = displaced_read_reg (regs, dsc, 0);
6416
6417 for (i = 0; i < 3; i++)
6418 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6419
6420 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6421 }
6422
6423 static void
6424 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6425 struct displaced_step_closure *dsc,
6426 unsigned int rd, unsigned int rn, unsigned int rm)
6427 {
6428 ULONGEST rd_val, rn_val, rm_val;
6429
6430 /* Instruction is of form:
6431
6432 <op><cond> rd, [rn,] rm [, <shift>]
6433
6434 Rewrite as:
6435
6436 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6437 r0, r1, r2 <- rd, rn, rm
6438 Insn: <op><cond> r0, r1, r2 [, <shift>]
6439 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6440 */
6441
6442 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6443 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6444 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6445 rd_val = displaced_read_reg (regs, dsc, rd);
6446 rn_val = displaced_read_reg (regs, dsc, rn);
6447 rm_val = displaced_read_reg (regs, dsc, rm);
6448 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6449 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6450 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6451 dsc->rd = rd;
6452
6453 dsc->cleanup = &cleanup_alu_reg;
6454 }
6455
6456 static int
6457 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6458 struct displaced_step_closure *dsc)
6459 {
6460 unsigned int op = bits (insn, 21, 24);
6461 int is_mov = (op == 0xd);
6462
6463 if (!insn_references_pc (insn, 0x000ff00ful))
6464 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6465
6466 if (debug_displaced)
6467 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6468 is_mov ? "move" : "ALU", (unsigned long) insn);
6469
6470 if (is_mov)
6471 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6472 else
6473 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6474
6475 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6476 bits (insn, 0, 3));
6477 return 0;
6478 }
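
/* Worked example (illustrative encoding; hypothetical helper, never
   called): "mov pc, lr" is 0xe1a0f00e.  Remapping Rd and Rm onto
   r0/r2 yields "mov r0, r2"; r2 is preloaded with the LR value and
   the cleanup writes r0 back to the PC via ALU_WRITE_PC.  */

static uint32_t
example_alu_reg_rewrite (void)
{
  uint32_t insn = 0xe1a0f00e;		/* mov pc, lr.  */

  return (insn & 0xfff00ff0) | 0x2;	/* 0xe1a00002: mov r0, r2.  */
}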
6479
6480 static int
6481 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6482 struct regcache *regs,
6483 struct displaced_step_closure *dsc)
6484 {
6485 unsigned rn, rm, rd;
6486
6487 rd = bits (insn, 3, 6);
6488 rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6489 rm = 2;
6490
6491 if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6492 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6493
6494 if (debug_displaced)
6495 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6496 "ALU", (unsigned short) insn);
6497
6498 dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6499
6500 install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6501
6502 return 0;
6503 }
6504
6505 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6506
6507 static void
6508 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6509 struct regcache *regs,
6510 struct displaced_step_closure *dsc)
6511 {
6512 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6513 int i;
6514
6515 for (i = 0; i < 4; i++)
6516 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6517
6518 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6519 }
6520
6521 static void
6522 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6523 struct displaced_step_closure *dsc,
6524 unsigned int rd, unsigned int rn, unsigned int rm,
6525 unsigned rs)
6526 {
6527 int i;
6528 ULONGEST rd_val, rn_val, rm_val, rs_val;
6529
6530 /* Instruction is of form:
6531
6532 <op><cond> rd, [rn,] rm, <shift> rs
6533
6534 Rewrite as:
6535
6536 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6537 r0, r1, r2, r3 <- rd, rn, rm, rs
6538 Insn: <op><cond> r0, r1, r2, <shift> r3
6539 Cleanup: tmp5 <- r0
6540 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6541 rd <- tmp5
6542 */
6543
6544 for (i = 0; i < 4; i++)
6545 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6546
6547 rd_val = displaced_read_reg (regs, dsc, rd);
6548 rn_val = displaced_read_reg (regs, dsc, rn);
6549 rm_val = displaced_read_reg (regs, dsc, rm);
6550 rs_val = displaced_read_reg (regs, dsc, rs);
6551 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6552 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6553 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6554 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6555 dsc->rd = rd;
6556 dsc->cleanup = &cleanup_alu_shifted_reg;
6557 }
6558
6559 static int
6560 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6561 struct regcache *regs,
6562 struct displaced_step_closure *dsc)
6563 {
6564 unsigned int op = bits (insn, 21, 24);
6565 int is_mov = (op == 0xd);
6566 unsigned int rd, rn, rm, rs;
6567
6568 if (!insn_references_pc (insn, 0x000fff0ful))
6569 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6570
6571 if (debug_displaced)
6572 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6573 "%.8lx\n", is_mov ? "move" : "ALU",
6574 (unsigned long) insn);
6575
6576 rn = bits (insn, 16, 19);
6577 rm = bits (insn, 0, 3);
6578 rs = bits (insn, 8, 11);
6579 rd = bits (insn, 12, 15);
6580
6581 if (is_mov)
6582 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6583 else
6584 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6585
6586 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6587
6588 return 0;
6589 }
6590
6591 /* Clean up load instructions. */
6592
6593 static void
6594 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6595 struct displaced_step_closure *dsc)
6596 {
6597 ULONGEST rt_val, rt_val2 = 0, rn_val;
6598
6599 rt_val = displaced_read_reg (regs, dsc, 0);
6600 if (dsc->u.ldst.xfersize == 8)
6601 rt_val2 = displaced_read_reg (regs, dsc, 1);
6602 rn_val = displaced_read_reg (regs, dsc, 2);
6603
6604 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6605 if (dsc->u.ldst.xfersize > 4)
6606 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6607 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6608 if (!dsc->u.ldst.immed)
6609 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6610
6611 /* Handle register writeback. */
6612 if (dsc->u.ldst.writeback)
6613 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6614 /* Put result in right place. */
6615 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6616 if (dsc->u.ldst.xfersize == 8)
6617 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6618 }
6619
6620 /* Clean up store instructions. */
6621
6622 static void
6623 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6624 struct displaced_step_closure *dsc)
6625 {
6626 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6627
6628 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6629 if (dsc->u.ldst.xfersize > 4)
6630 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6631 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6632 if (!dsc->u.ldst.immed)
6633 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6634 if (!dsc->u.ldst.restore_r4)
6635 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6636
6637 /* Writeback. */
6638 if (dsc->u.ldst.writeback)
6639 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6640 }
6641
6642 /* Copy "extra" load/store instructions. These are halfword/doubleword
6643 transfers, which have a different encoding to byte/word transfers. */
6644
6645 static int
6646 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6647 struct regcache *regs, struct displaced_step_closure *dsc)
6648 {
6649 unsigned int op1 = bits (insn, 20, 24);
6650 unsigned int op2 = bits (insn, 5, 6);
6651 unsigned int rt = bits (insn, 12, 15);
6652 unsigned int rn = bits (insn, 16, 19);
6653 unsigned int rm = bits (insn, 0, 3);
6654 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6655 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6656 int immed = (op1 & 0x4) != 0;
6657 int opcode;
6658 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6659
6660 if (!insn_references_pc (insn, 0x000ff00ful))
6661 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6662
6663 if (debug_displaced)
6664 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6665 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6666 (unsigned long) insn);
6667
6668 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6669
6670 if (opcode < 0)
6671 internal_error (__FILE__, __LINE__,
6672 _("copy_extra_ld_st: instruction decode error"));
6673
6674 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6675 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6676 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6677 if (!immed)
6678 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6679
6680 rt_val = displaced_read_reg (regs, dsc, rt);
6681 if (bytesize[opcode] == 8)
6682 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6683 rn_val = displaced_read_reg (regs, dsc, rn);
6684 if (!immed)
6685 rm_val = displaced_read_reg (regs, dsc, rm);
6686
6687 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6688 if (bytesize[opcode] == 8)
6689 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6690 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6691 if (!immed)
6692 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6693
6694 dsc->rd = rt;
6695 dsc->u.ldst.xfersize = bytesize[opcode];
6696 dsc->u.ldst.rn = rn;
6697 dsc->u.ldst.immed = immed;
6698 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6699 dsc->u.ldst.restore_r4 = 0;
6700
6701 if (immed)
6702 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6703 ->
6704 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6705 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6706 else
6707 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6708 ->
6709 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6710 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6711
6712 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6713
6714 return 0;
6715 }
6716
6717 /* Copy byte/half word/word loads and stores. */
6718
6719 static void
6720 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6721 struct displaced_step_closure *dsc, int load,
6722 int immed, int writeback, int size, int usermode,
6723 int rt, int rm, int rn)
6724 {
6725 ULONGEST rt_val, rn_val, rm_val = 0;
6726
6727 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6728 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6729 if (!immed)
6730 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6731 if (!load)
6732 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6733
6734 rt_val = displaced_read_reg (regs, dsc, rt);
6735 rn_val = displaced_read_reg (regs, dsc, rn);
6736 if (!immed)
6737 rm_val = displaced_read_reg (regs, dsc, rm);
6738
6739 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6740 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6741 if (!immed)
6742 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6743 dsc->rd = rt;
6744 dsc->u.ldst.xfersize = size;
6745 dsc->u.ldst.rn = rn;
6746 dsc->u.ldst.immed = immed;
6747 dsc->u.ldst.writeback = writeback;
6748
6749 /* To write PC we can do:
6750
6751 Before this sequence of instructions:
6752 r0 is the PC value read from displaced_read_reg, so r0 = from + 8;
6753 r2 is the Rn value read from displaced_read_reg.
6754
6755 Insn1: push {pc} Write address of STR instruction + offset on stack
6756 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6757 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6758 = addr(Insn1) + offset - addr(Insn3) - 8
6759 = offset - 16
6760 Insn4: add r4, r4, #8 r4 = offset - 8
6761 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6762 = from + offset
6763 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6764
6765 Otherwise we don't know what value to write for PC, since the offset is
6766 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6767 of this can be found in Section "Saving from r15" in
6768 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6769
6770 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6771 }
6772
6773
6774 static int
6775 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6776 uint16_t insn2, struct regcache *regs,
6777 struct displaced_step_closure *dsc, int size)
6778 {
6779 unsigned int u_bit = bit (insn1, 7);
6780 unsigned int rt = bits (insn2, 12, 15);
6781 int imm12 = bits (insn2, 0, 11);
6782 ULONGEST pc_val;
6783
6784 if (debug_displaced)
6785 fprintf_unfiltered (gdb_stdlog,
6786 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6787 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6788 imm12);
6789
6790 if (!u_bit)
6791 imm12 = -1 * imm12;
6792
6793 /* Rewrite instruction LDR Rt imm12 into:
6794
6795 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6796
6797 LDR R0, R2, R3,
6798
6799 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6800
6801
6802 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6803 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6804 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6805
6806 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6807
6808 pc_val = pc_val & 0xfffffffc;
6809
6810 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6811 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6812
6813 dsc->rd = rt;
6814
6815 dsc->u.ldst.xfersize = size;
6816 dsc->u.ldst.immed = 0;
6817 dsc->u.ldst.writeback = 0;
6818 dsc->u.ldst.restore_r4 = 0;
6819
6820 /* LDR R0, R2, R3 */
6821 dsc->modinsn[0] = 0xf852;
6822 dsc->modinsn[1] = 0x3;
6823 dsc->numinsns = 2;
6824
6825 dsc->cleanup = &cleanup_load;
6826
6827 return 0;
6828 }
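
/* Worked example (illustrative addresses; hypothetical helper, never
   called): for "ldr.w r1, [pc, #8]" at 0x8000, the displaced PC read
   yields 0x8004; after word alignment the base is still 0x8004, so
   the rewritten "ldr r0, [r2, r3]" (r2 = 0x8004, r3 = 8) loads from
   0x800c, and the cleanup copies r0 into r1.  */

static CORE_ADDR
example_thumb_ldr_literal_addr (CORE_ADDR insn_addr, int imm12)
{
  CORE_ADDR pc_val = (insn_addr + 4) & 0xfffffffc;	/* Align (PC, 4).  */

  return pc_val + imm12;
}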
6829
6830 static int
6831 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6832 uint16_t insn2, struct regcache *regs,
6833 struct displaced_step_closure *dsc,
6834 int writeback, int immed)
6835 {
6836 unsigned int rt = bits (insn2, 12, 15);
6837 unsigned int rn = bits (insn1, 0, 3);
6838 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6839 /* In LDR (register), there is also a register Rm, which is not allowed to
6840 be PC, so we don't have to check it. */
6841
6842 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6843 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6844 dsc);
6845
6846 if (debug_displaced)
6847 fprintf_unfiltered (gdb_stdlog,
6848 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6849 rt, rn, insn1, insn2);
6850
6851 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6852 0, rt, rm, rn);
6853
6854 dsc->u.ldst.restore_r4 = 0;
6855
6856 if (immed)
6857 /* ldr[b]<cond> rt, [rn, #imm], etc.
6858 ->
6859 ldr[b]<cond> r0, [r2, #imm]. */
6860 {
6861 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6862 dsc->modinsn[1] = insn2 & 0x0fff;
6863 }
6864 else
6865 /* ldr[b]<cond> rt, [rn, rm], etc.
6866 ->
6867 ldr[b]<cond> r0, [r2, r3]. */
6868 {
6869 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6870 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6871 }
6872
6873 dsc->numinsns = 2;
6874
6875 return 0;
6876 }
6877
6878
6879 static int
6880 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6881 struct regcache *regs,
6882 struct displaced_step_closure *dsc,
6883 int load, int size, int usermode)
6884 {
6885 int immed = !bit (insn, 25);
6886 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6887 unsigned int rt = bits (insn, 12, 15);
6888 unsigned int rn = bits (insn, 16, 19);
6889 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6890
6891 if (!insn_references_pc (insn, 0x000ff00ful))
6892 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6893
6894 if (debug_displaced)
6895 fprintf_unfiltered (gdb_stdlog,
6896 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6897 load ? (size == 1 ? "ldrb" : "ldr")
6898 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6899 rt, rn,
6900 (unsigned long) insn);
6901
6902 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6903 usermode, rt, rm, rn);
6904
6905 if (load || rt != ARM_PC_REGNUM)
6906 {
6907 dsc->u.ldst.restore_r4 = 0;
6908
6909 if (immed)
6910 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6911 ->
6912 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6913 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6914 else
6915 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6916 ->
6917 {ldr,str}[b]<cond> r0, [r2, r3]. */
6918 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6919 }
6920 else
6921 {
6922 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6923 dsc->u.ldst.restore_r4 = 1;
6924 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6925 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6926 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6927 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6928 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6929
6930 /* As above. */
6931 if (immed)
6932 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6933 else
6934 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6935
6936 dsc->numinsns = 6;
6937 }
6938
6939 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6940
6941 return 0;
6942 }
6943
6944 /* Cleanup LDM instructions with fully-populated register list. This is an
6945 unfortunate corner case: it's impossible to implement correctly by modifying
6946 the instruction. The issue is as follows: we have an instruction,
6947
6948 ldm rN, {r0-r15}
6949
6950 which we must rewrite to avoid loading PC. A possible solution would be to
6951 do the load in two halves, something like (with suitable cleanup
6952 afterwards):
6953
6954 mov r8, rN
6955 ldm[id][ab] r8!, {r0-r7}
6956 str r7, <temp>
6957 ldm[id][ab] r8, {r7-r14}
6958 <bkpt>
6959
6960 but at present there's no suitable place for <temp>, since the scratch space
6961 is overwritten before the cleanup routine is called. For now, we simply
6962 emulate the instruction. */
6963
6964 static void
6965 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6966 struct displaced_step_closure *dsc)
6967 {
6968 int inc = dsc->u.block.increment;
6969 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6970 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6971 uint32_t regmask = dsc->u.block.regmask;
6972 int regno = inc ? 0 : 15;
6973 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6974 int exception_return = dsc->u.block.load && dsc->u.block.user
6975 && (regmask & 0x8000) != 0;
6976 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6977 int do_transfer = condition_true (dsc->u.block.cond, status);
6978 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6979
6980 if (!do_transfer)
6981 return;
6982
6983 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6984 sensible we can do here. Complain loudly. */
6985 if (exception_return)
6986 error (_("Cannot single-step exception return"));
6987
6988 /* We don't handle any stores here for now. */
6989 gdb_assert (dsc->u.block.load != 0);
6990
6991 if (debug_displaced)
6992 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6993 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6994 dsc->u.block.increment ? "inc" : "dec",
6995 dsc->u.block.before ? "before" : "after");
6996
6997 while (regmask)
6998 {
6999 uint32_t memword;
7000
7001 if (inc)
7002 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
7003 regno++;
7004 else
7005 while (regno >= 0 && (regmask & (1 << regno)) == 0)
7006 regno--;
7007
7008 xfer_addr += bump_before;
7009
7010 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
7011 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
7012
7013 xfer_addr += bump_after;
7014
7015 regmask &= ~(1 << regno);
7016 }
7017
7018 if (dsc->u.block.writeback)
7019 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
7020 CANNOT_WRITE_PC);
7021 }
7022
7023 /* Clean up an STM which included the PC in the register list. */
7024
7025 static void
7026 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
7027 struct displaced_step_closure *dsc)
7028 {
7029 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7030 int store_executed = condition_true (dsc->u.block.cond, status);
7031 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
7032 CORE_ADDR stm_insn_addr;
7033 uint32_t pc_val;
7034 long offset;
7035 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7036
7037 /* If condition code fails, there's nothing else to do. */
7038 if (!store_executed)
7039 return;
7040
7041 if (dsc->u.block.increment)
7042 {
7043 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
7044
7045 if (dsc->u.block.before)
7046 pc_stored_at += 4;
7047 }
7048 else
7049 {
7050 pc_stored_at = dsc->u.block.xfer_addr;
7051
7052 if (dsc->u.block.before)
7053 pc_stored_at -= 4;
7054 }
7055
7056 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
7057 stm_insn_addr = dsc->scratch_base;
7058 offset = pc_val - stm_insn_addr;
7059
7060 if (debug_displaced)
7061 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
7062 "STM instruction\n", offset);
7063
7064 /* Rewrite the stored PC to the proper value for the non-displaced original
7065 instruction. */
7066 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
7067 dsc->insn_addr + offset);
7068 }
7069
7070 /* Clean up an LDM which includes the PC in the register list. We clumped all
7071 the registers in the transferred list into a contiguous range r0...rX (to
7072 avoid loading PC directly and losing control of the debugged program), so we
7073 must undo that here. */
7074
7075 static void
7076 cleanup_block_load_pc (struct gdbarch *gdbarch,
7077 struct regcache *regs,
7078 struct displaced_step_closure *dsc)
7079 {
7080 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7081 int load_executed = condition_true (dsc->u.block.cond, status);
7082 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
7083 unsigned int regs_loaded = bitcount (mask);
7084 unsigned int num_to_shuffle = regs_loaded, clobbered;
7085
7086 /* The method employed here will fail if the register list is fully populated
7087 (we need to avoid loading PC directly). */
7088 gdb_assert (num_to_shuffle < 16);
7089
7090 if (!load_executed)
7091 return;
7092
7093 clobbered = (1 << num_to_shuffle) - 1;
7094
7095 while (num_to_shuffle > 0)
7096 {
7097 if ((mask & (1 << write_reg)) != 0)
7098 {
7099 unsigned int read_reg = num_to_shuffle - 1;
7100
7101 if (read_reg != write_reg)
7102 {
7103 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7104 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7105 if (debug_displaced)
7106 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
7107 "loaded register r%d to r%d\n"), read_reg,
7108 write_reg);
7109 }
7110 else if (debug_displaced)
7111 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
7112 "r%d already in the right place\n"),
7113 write_reg);
7114
7115 clobbered &= ~(1 << write_reg);
7116
7117 num_to_shuffle--;
7118 }
7119
7120 write_reg--;
7121 }
7122
7123 /* Restore any registers we scribbled over. */
7124 for (write_reg = 0; clobbered != 0; write_reg++)
7125 {
7126 if ((clobbered & (1 << write_reg)) != 0)
7127 {
7128 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7129 CANNOT_WRITE_PC);
7130 if (debug_displaced)
7131 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
7132 "clobbered register r%d\n"), write_reg);
7133 clobbered &= ~(1 << write_reg);
7134 }
7135 }
7136
7137 /* Perform register writeback manually. */
7138 if (dsc->u.block.writeback)
7139 {
7140 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7141
7142 if (dsc->u.block.increment)
7143 new_rn_val += regs_loaded * 4;
7144 else
7145 new_rn_val -= regs_loaded * 4;
7146
7147 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7148 CANNOT_WRITE_PC);
7149 }
7150 }
7151
7152 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7153 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7154
7155 static int
7156 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7157 struct regcache *regs,
7158 struct displaced_step_closure *dsc)
7159 {
7160 int load = bit (insn, 20);
7161 int user = bit (insn, 22);
7162 int increment = bit (insn, 23);
7163 int before = bit (insn, 24);
7164 int writeback = bit (insn, 21);
7165 int rn = bits (insn, 16, 19);
7166
7167 /* Block transfers which don't mention PC can be run directly
7168 out-of-line. */
7169 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7170 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7171
7172 if (rn == ARM_PC_REGNUM)
7173 {
7174 warning (_("displaced: Unpredictable LDM or STM with "
7175 "base register r15"));
7176 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7177 }
7178
7179 if (debug_displaced)
7180 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7181 "%.8lx\n", (unsigned long) insn);
7182
7183 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7184 dsc->u.block.rn = rn;
7185
7186 dsc->u.block.load = load;
7187 dsc->u.block.user = user;
7188 dsc->u.block.increment = increment;
7189 dsc->u.block.before = before;
7190 dsc->u.block.writeback = writeback;
7191 dsc->u.block.cond = bits (insn, 28, 31);
7192
7193 dsc->u.block.regmask = insn & 0xffff;
7194
7195 if (load)
7196 {
7197 if ((insn & 0xffff) == 0xffff)
7198 {
7199 /* LDM with a fully-populated register list. This case is
7200 particularly tricky. Implement for now by fully emulating the
7201 instruction (which might not behave perfectly in all cases, but
7202 these instructions should be rare enough for that not to matter
7203 too much). */
7204 dsc->modinsn[0] = ARM_NOP;
7205
7206 dsc->cleanup = &cleanup_block_load_all;
7207 }
7208 else
7209 {
7210 /* LDM of a list of registers which includes PC. Implement by
7211 rewriting the list of registers to be transferred into a
7212 contiguous chunk r0...rX before doing the transfer, then shuffling
7213 registers into the correct places in the cleanup routine. */
7214 unsigned int regmask = insn & 0xffff;
7215 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7216 unsigned int to = 0, from = 0, i, new_rn;
7217
7218 for (i = 0; i < num_in_list; i++)
7219 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7220
7221 /* Writeback makes things complicated. We need to avoid clobbering
7222 the base register with one of the registers in our modified
7223 register list, but just using a different register can't work in
7224 all cases, e.g.:
7225
7226 ldm r14!, {r0-r13,pc}
7227
7228 which would need to be rewritten as:
7229
7230 ldm rN!, {r0-r14}
7231
7232 but that can't work, because there's no free register for N.
7233
7234 Solve this by turning off the writeback bit, and emulating
7235 writeback manually in the cleanup routine. */
7236
7237 if (writeback)
7238 insn &= ~(1 << 21);
7239
7240 new_regmask = (1 << num_in_list) - 1;
7241
7242 if (debug_displaced)
7243 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7244 "{..., pc}: original reg list %.4x, modified "
7245 "list %.4x\n"), rn, writeback ? "!" : "",
7246 (int) insn & 0xffff, new_regmask);
7247
7248 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7249
7250 dsc->cleanup = &cleanup_block_load_pc;
7251 }
7252 }
7253 else
7254 {
7255 /* STM of a list of registers which includes PC. Run the instruction
7256 as-is, but out of line: this will store the wrong value for the PC,
7257 so we must manually fix up the memory in the cleanup routine.
7258 Doing things this way has the advantage that we can auto-detect
7259 the offset of the PC write (which is architecture-dependent) in
7260 the cleanup routine. */
7261 dsc->modinsn[0] = insn;
7262
7263 dsc->cleanup = &cleanup_block_store_pc;
7264 }
7265
7266 return 0;
7267 }
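
/* Worked example (illustrative encoding; hypothetical helper, never
   called): "ldmia sp!, {r4, r5, pc}" is 0xe8bd8030.  Three registers
   are in the list, so the scratch copy loads the contiguous set
   {r0, r1, r2} with writeback suppressed; cleanup_block_load_pc then
   moves r2 into the PC, r1 into r5, r0 into r4, restores the scratch
   registers it clobbered, and performs the sp writeback manually.  */

static uint32_t
example_ldm_pc_rewrite (void)
{
  uint32_t insn = 0xe8bd8030;		/* ldmia sp!, {r4, r5, pc}.  */

  insn &= ~(1 << 21);			/* Drop writeback.  */
  return (insn & ~0xffffu) | 0x0007;	/* 0xe89d0007: ldmia sp, {r0-r2}.  */
}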
7268
7269 static int
7270 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7271 struct regcache *regs,
7272 struct displaced_step_closure *dsc)
7273 {
7274 int rn = bits (insn1, 0, 3);
7275 int load = bit (insn1, 4);
7276 int writeback = bit (insn1, 5);
7277
7278 /* Block transfers which don't mention PC can be run directly
7279 out-of-line. */
7280 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7281 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7282
7283 if (rn == ARM_PC_REGNUM)
7284 {
7285 warning (_("displaced: Unpredictable LDM or STM with "
7286 "base register r15"));
7287 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7288 "unpredictable ldm/stm", dsc);
7289 }
7290
7291 if (debug_displaced)
7292 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7293 "%.4x%.4x\n", insn1, insn2);
7294
7295 /* Clear bit 13, since it should always be zero. */
7296 dsc->u.block.regmask = (insn2 & 0xdfff);
7297 dsc->u.block.rn = rn;
7298
7299 dsc->u.block.load = load;
7300 dsc->u.block.user = 0;
7301 dsc->u.block.increment = bit (insn1, 7);
7302 dsc->u.block.before = bit (insn1, 8);
7303 dsc->u.block.writeback = writeback;
7304 dsc->u.block.cond = INST_AL;
7305 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7306
7307 if (load)
7308 {
7309 if (dsc->u.block.regmask == 0xffff)
7310 {
7311 /* This cannot happen: bit 13 was cleared above, so the register mask can never be 0xffff. */
7312 gdb_assert (0);
7313 }
7314 else
7315 {
7316 unsigned int regmask = dsc->u.block.regmask;
7317 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7318 unsigned int to = 0, from = 0, i, new_rn;
7319
7320 for (i = 0; i < num_in_list; i++)
7321 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7322
7323 if (writeback)
7324 insn1 &= ~(1 << 5);
7325
7326 new_regmask = (1 << num_in_list) - 1;
7327
7328 if (debug_displaced)
7329 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7330 "{..., pc}: original reg list %.4x, modified "
7331 "list %.4x\n"), rn, writeback ? "!" : "",
7332 (int) dsc->u.block.regmask, new_regmask);
7333
7334 dsc->modinsn[0] = insn1;
7335 dsc->modinsn[1] = (new_regmask & 0xffff);
7336 dsc->numinsns = 2;
7337
7338 dsc->cleanup = &cleanup_block_load_pc;
7339 }
7340 }
7341 else
7342 {
7343 dsc->modinsn[0] = insn1;
7344 dsc->modinsn[1] = insn2;
7345 dsc->numinsns = 2;
7346 dsc->cleanup = &cleanup_block_store_pc;
7347 }
7348 return 0;
7349 }
7350
7351 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7352 for Linux, where some SVC instructions must be treated specially. */
7353
7354 static void
7355 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7356 struct displaced_step_closure *dsc)
7357 {
7358 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7359
7360 if (debug_displaced)
7361 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7362 "%.8lx\n", (unsigned long) resume_addr);
7363
7364 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7365 }
7366
7367
7368 /* Common copy routine for the svc instruction. */
7369
7370 static int
7371 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7372 struct displaced_step_closure *dsc)
7373 {
7374 /* Preparation: none.
7375 Insn: unmodified svc.
7376 Cleanup: pc <- insn_addr + insn_size. */
7377
7378 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7379 instruction. */
7380 dsc->wrote_to_pc = 1;
7381
7382 /* Allow OS-specific code to override SVC handling. */
7383 if (dsc->u.svc.copy_svc_os)
7384 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7385 else
7386 {
7387 dsc->cleanup = &cleanup_svc;
7388 return 0;
7389 }
7390 }
7391
7392 static int
7393 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7394 struct regcache *regs, struct displaced_step_closure *dsc)
7395 {
7396
7397 if (debug_displaced)
7398 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7399 (unsigned long) insn);
7400
7401 dsc->modinsn[0] = insn;
7402
7403 return install_svc (gdbarch, regs, dsc);
7404 }
7405
7406 static int
7407 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7408 struct regcache *regs, struct displaced_step_closure *dsc)
7409 {
7410
7411 if (debug_displaced)
7412 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7413 insn);
7414
7415 dsc->modinsn[0] = insn;
7416
7417 return install_svc (gdbarch, regs, dsc);
7418 }
7419
7420 /* Copy undefined instructions. */
7421
7422 static int
7423 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7424 struct displaced_step_closure *dsc)
7425 {
7426 if (debug_displaced)
7427 fprintf_unfiltered (gdb_stdlog,
7428 "displaced: copying undefined insn %.8lx\n",
7429 (unsigned long) insn);
7430
7431 dsc->modinsn[0] = insn;
7432
7433 return 0;
7434 }
7435
7436 static int
7437 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7438 struct displaced_step_closure *dsc)
7439 {
7440
7441 if (debug_displaced)
7442 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7443 "%.4x %.4x\n", (unsigned short) insn1,
7444 (unsigned short) insn2);
7445
7446 dsc->modinsn[0] = insn1;
7447 dsc->modinsn[1] = insn2;
7448 dsc->numinsns = 2;
7449
7450 return 0;
7451 }
7452
7453 /* Copy unpredictable instructions. */
7454
7455 static int
7456 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7457 struct displaced_step_closure *dsc)
7458 {
7459 if (debug_displaced)
7460 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7461 "%.8lx\n", (unsigned long) insn);
7462
7463 dsc->modinsn[0] = insn;
7464
7465 return 0;
7466 }
7467
7468 /* The decode_* functions are instruction decoding helpers. They mostly follow
7469 the presentation in the ARM ARM. */
7470
7471 static int
7472 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7473 struct regcache *regs,
7474 struct displaced_step_closure *dsc)
7475 {
7476 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7477 unsigned int rn = bits (insn, 16, 19);
7478
7479 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7480 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7481 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7482 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7483 else if ((op1 & 0x60) == 0x20)
7484 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7485 else if ((op1 & 0x71) == 0x40)
7486 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7487 dsc);
7488 else if ((op1 & 0x77) == 0x41)
7489 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7490 else if ((op1 & 0x77) == 0x45)
7491 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7492 else if ((op1 & 0x77) == 0x51)
7493 {
7494 if (rn != 0xf)
7495 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7496 else
7497 return arm_copy_unpred (gdbarch, insn, dsc);
7498 }
7499 else if ((op1 & 0x77) == 0x55)
7500 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7501 else if (op1 == 0x57)
7502 switch (op2)
7503 {
7504 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7505 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7506 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7507 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7508 default: return arm_copy_unpred (gdbarch, insn, dsc);
7509 }
7510 else if ((op1 & 0x63) == 0x43)
7511 return arm_copy_unpred (gdbarch, insn, dsc);
7512 else if ((op2 & 0x1) == 0x0)
7513 switch (op1 & ~0x80)
7514 {
7515 case 0x61:
7516 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7517 case 0x65:
7518 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7519 case 0x71: case 0x75:
7520 /* pld/pldw reg. */
7521 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7522 case 0x63: case 0x67: case 0x73: case 0x77:
7523 return arm_copy_unpred (gdbarch, insn, dsc);
7524 default:
7525 return arm_copy_undef (gdbarch, insn, dsc);
7526 }
7527 else
7528 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7529 }
7530
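/* Decode the ARM unconditional instruction space (condition field 0b1111)
and dispatch to the appropriate copy routine. */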
7531 static int
7532 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7533 struct regcache *regs,
7534 struct displaced_step_closure *dsc)
7535 {
7536 if (bit (insn, 27) == 0)
7537 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7538 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7539 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7540 {
7541 case 0x0: case 0x2:
7542 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7543
7544 case 0x1: case 0x3:
7545 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7546
7547 case 0x4: case 0x5: case 0x6: case 0x7:
7548 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7549
7550 case 0x8:
7551 switch ((insn & 0xe00000) >> 21)
7552 {
7553 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7554 /* stc/stc2. */
7555 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7556
7557 case 0x2:
7558 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7559
7560 default:
7561 return arm_copy_undef (gdbarch, insn, dsc);
7562 }
7563
7564 case 0x9:
7565 {
7566 int rn_f = (bits (insn, 16, 19) == 0xf);
7567 switch ((insn & 0xe00000) >> 21)
7568 {
7569 case 0x1: case 0x3:
7570 /* ldc/ldc2 imm (undefined for rn == pc). */
7571 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7572 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7573
7574 case 0x2:
7575 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7576
7577 case 0x4: case 0x5: case 0x6: case 0x7:
7578 /* ldc/ldc2 lit (undefined for rn != pc). */
7579 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7580 : arm_copy_undef (gdbarch, insn, dsc);
7581
7582 default:
7583 return arm_copy_undef (gdbarch, insn, dsc);
7584 }
7585 }
7586
7587 case 0xa:
7588 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7589
7590 case 0xb:
7591 if (bits (insn, 16, 19) == 0xf)
7592 /* ldc/ldc2 lit. */
7593 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7594 else
7595 return arm_copy_undef (gdbarch, insn, dsc);
7596
7597 case 0xc:
7598 if (bit (insn, 4))
7599 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7600 else
7601 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7602
7603 case 0xd:
7604 if (bit (insn, 4))
7605 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7606 else
7607 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7608
7609 default:
7610 return arm_copy_undef (gdbarch, insn, dsc);
7611 }
7612 }
7613
7614 /* Decode miscellaneous instructions in dp/misc encoding space. */
7615
7616 static int
7617 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7618 struct regcache *regs,
7619 struct displaced_step_closure *dsc)
7620 {
7621 unsigned int op2 = bits (insn, 4, 6);
7622 unsigned int op = bits (insn, 21, 22);
7623 unsigned int op1 = bits (insn, 16, 19);
7624
7625 switch (op2)
7626 {
7627 case 0x0:
7628 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7629
7630 case 0x1:
7631 if (op == 0x1) /* bx. */
7632 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7633 else if (op == 0x3)
7634 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7635 else
7636 return arm_copy_undef (gdbarch, insn, dsc);
7637
7638 case 0x2:
7639 if (op == 0x1)
7640 /* Not really supported. */
7641 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7642 else
7643 return arm_copy_undef (gdbarch, insn, dsc);
7644
7645 case 0x3:
7646 if (op == 0x1)
7647 return arm_copy_bx_blx_reg (gdbarch, insn,
7648 regs, dsc); /* blx register. */
7649 else
7650 return arm_copy_undef (gdbarch, insn, dsc);
7651
7652 case 0x5:
7653 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7654
7655 case 0x7:
7656 if (op == 0x1)
7657 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7658 else if (op == 0x3)
7659 /* Not really supported. */
7660 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7661
7662 default:
7663 return arm_copy_undef (gdbarch, insn, dsc);
7664 }
7665 }
7666
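/* Decode the ARM data-processing and miscellaneous instruction space and
dispatch to the appropriate copy routine. */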
7667 static int
7668 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7669 struct regcache *regs,
7670 struct displaced_step_closure *dsc)
7671 {
7672 if (bit (insn, 25))
7673 switch (bits (insn, 20, 24))
7674 {
7675 case 0x10:
7676 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7677
7678 case 0x14:
7679 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7680
7681 case 0x12: case 0x16:
7682 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7683
7684 default:
7685 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7686 }
7687 else
7688 {
7689 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7690
7691 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7692 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7693 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7694 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7695 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7696 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7697 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7698 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7699 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7700 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7701 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7702 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7703 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7704 /* 2nd arg means "unprivileged". */
7705 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7706 dsc);
7707 }
7708
7709 /* Should be unreachable. */
7710 return 1;
7711 }
7712
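/* Decode ARM load/store word and unsigned byte instructions, selecting the
load/store, byte/word and unprivileged variants of the ldr/str copy
routine. */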
7713 static int
7714 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7715 struct regcache *regs,
7716 struct displaced_step_closure *dsc)
7717 {
7718 int a = bit (insn, 25), b = bit (insn, 4);
7719 uint32_t op1 = bits (insn, 20, 24);
7720 int rn_f = bits (insn, 16, 19) == 0xf;
7721
7722 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7723 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7724 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7725 else if ((!a && (op1 & 0x17) == 0x02)
7726 || (a && (op1 & 0x17) == 0x02 && !b))
7727 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7728 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7729 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7730 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7731 else if ((!a && (op1 & 0x17) == 0x03)
7732 || (a && (op1 & 0x17) == 0x03 && !b))
7733 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7734 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7735 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7736 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7737 else if ((!a && (op1 & 0x17) == 0x06)
7738 || (a && (op1 & 0x17) == 0x06 && !b))
7739 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7740 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7741 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7742 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7743 else if ((!a && (op1 & 0x17) == 0x07)
7744 || (a && (op1 & 0x17) == 0x07 && !b))
7745 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7746
7747 /* Should be unreachable. */
7748 return 1;
7749 }
7750
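/* Decode the ARM media instruction space (parallel add/subtract, pack,
saturate, reverse, usad8/usada8 and bit-field instructions); all of these
are either copied unmodified or treated as undefined. */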
7751 static int
7752 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7753 struct displaced_step_closure *dsc)
7754 {
7755 switch (bits (insn, 20, 24))
7756 {
7757 case 0x00: case 0x01: case 0x02: case 0x03:
7758 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7759
7760 case 0x04: case 0x05: case 0x06: case 0x07:
7761 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7762
7763 case 0x08: case 0x09: case 0x0a: case 0x0b:
7764 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7765 return arm_copy_unmodified (gdbarch, insn,
7766 "decode/pack/unpack/saturate/reverse", dsc);
7767
7768 case 0x18:
7769 if (bits (insn, 5, 7) == 0) /* op2. */
7770 {
7771 if (bits (insn, 12, 15) == 0xf)
7772 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7773 else
7774 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7775 }
7776 else
7777 return arm_copy_undef (gdbarch, insn, dsc);
7778
7779 case 0x1a: case 0x1b:
7780 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7781 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7782 else
7783 return arm_copy_undef (gdbarch, insn, dsc);
7784
7785 case 0x1c: case 0x1d:
7786 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7787 {
7788 if (bits (insn, 0, 3) == 0xf)
7789 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7790 else
7791 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7792 }
7793 else
7794 return arm_copy_undef (gdbarch, insn, dsc);
7795
7796 case 0x1e: case 0x1f:
7797 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7798 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7799 else
7800 return arm_copy_undef (gdbarch, insn, dsc);
7801 }
7802
7803 /* Should be unreachable. */
7804 return 1;
7805 }
7806
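/* Decode ARM branch (b/bl) versus block transfer (ldm/stm) instructions,
distinguished by bit 25. */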
7807 static int
7808 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7809 struct regcache *regs,
7810 struct displaced_step_closure *dsc)
7811 {
7812 if (bit (insn, 25))
7813 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7814 else
7815 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7816 }
7817
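/* Decode VFP/Neon extension register load/store instructions (vstm/vpush,
vldm/vpop, vstr/vldr and the 64-bit mcrr/mrrc transfers). */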
7818 static int
7819 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7820 struct regcache *regs,
7821 struct displaced_step_closure *dsc)
7822 {
7823 unsigned int opcode = bits (insn, 20, 24);
7824
7825 switch (opcode)
7826 {
7827 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7828 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7829
7830 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7831 case 0x12: case 0x16:
7832 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7833
7834 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7835 case 0x13: case 0x17:
7836 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7837
7838 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7839 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7840 /* Note: no writeback for these instructions. Bit 25 will always be
7841 zero though (via caller), so the following works OK. */
7842 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7843 }
7844
7845 /* Should be unreachable. */
7846 return 1;
7847 }
7848
7849 /* Decode shifted register instructions. */
7850
7851 static int
7852 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7853 uint16_t insn2, struct regcache *regs,
7854 struct displaced_step_closure *dsc)
7855 {
7856 /* PC is only allowed to be used in the MOV instruction. */
7857
7858 unsigned int op = bits (insn1, 5, 8);
7859 unsigned int rn = bits (insn1, 0, 3);
7860
7861 if (op == 0x2 && rn == 0xf) /* MOV */
7862 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7863 else
7864 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7865 "dp (shift reg)", dsc);
7866 }
7867
7868
7869 /* Decode extension register load/store. Exactly the same as
7870 arm_decode_ext_reg_ld_st. */
7871
7872 static int
7873 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7874 uint16_t insn2, struct regcache *regs,
7875 struct displaced_step_closure *dsc)
7876 {
7877 unsigned int opcode = bits (insn1, 4, 8);
7878
7879 switch (opcode)
7880 {
7881 case 0x04: case 0x05:
7882 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7883 "vfp/neon vmov", dsc);
7884
7885 case 0x08: case 0x0c: /* 01x00 */
7886 case 0x0a: case 0x0e: /* 01x10 */
7887 case 0x12: case 0x16: /* 10x10 */
7888 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7889 "vfp/neon vstm/vpush", dsc);
7890
7891 case 0x09: case 0x0d: /* 01x01 */
7892 case 0x0b: case 0x0f: /* 01x11 */
7893 case 0x13: case 0x17: /* 10x11 */
7894 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7895 "vfp/neon vldm/vpop", dsc);
7896
7897 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7898 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7899 "vstr", dsc);
7900 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7901 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7902 }
7903
7904 /* Should be unreachable. */
7905 return 1;
7906 }
7907
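/* Decode the ARM coprocessor, Advanced SIMD/VFP and supervisor call (svc)
instruction space. */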
7908 static int
7909 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7910 struct regcache *regs, struct displaced_step_closure *dsc)
7911 {
7912 unsigned int op1 = bits (insn, 20, 25);
7913 int op = bit (insn, 4);
7914 unsigned int coproc = bits (insn, 8, 11);
7915 unsigned int rn = bits (insn, 16, 19);
7916
7917 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7918 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7919 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7920 && (coproc & 0xe) != 0xa)
7921 /* stc/stc2. */
7922 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7923 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7924 && (coproc & 0xe) != 0xa)
7925 /* ldc/ldc2 imm/lit. */
7926 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7927 else if ((op1 & 0x3e) == 0x00)
7928 return arm_copy_undef (gdbarch, insn, dsc);
7929 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7930 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7931 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7932 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7933 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7934 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7935 else if ((op1 & 0x30) == 0x20 && !op)
7936 {
7937 if ((coproc & 0xe) == 0xa)
7938 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7939 else
7940 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7941 }
7942 else if ((op1 & 0x30) == 0x20 && op)
7943 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7944 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7945 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7946 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7947 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7948 else if ((op1 & 0x30) == 0x30)
7949 return arm_copy_svc (gdbarch, insn, regs, dsc);
7950 else
7951 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7952 }
7953
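/* Decode the Thumb-2 coprocessor, Advanced SIMD and VFP instruction
space. */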
7954 static int
7955 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7956 uint16_t insn2, struct regcache *regs,
7957 struct displaced_step_closure *dsc)
7958 {
7959 unsigned int coproc = bits (insn2, 8, 11);
7960 unsigned int op1 = bits (insn1, 4, 9);
7961 unsigned int bit_5_8 = bits (insn1, 5, 8);
7962 unsigned int bit_9 = bit (insn1, 9);
7963 unsigned int bit_4 = bit (insn1, 4);
7964 unsigned int rn = bits (insn1, 0, 3);
7965
7966 if (bit_9 == 0)
7967 {
7968 if (bit_5_8 == 2)
7969 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7970 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7971 dsc);
7972 else if (bit_5_8 == 0) /* UNDEFINED. */
7973 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7974 else
7975 {
7976 /* coproc is 101x: SIMD/VFP extension register load/store. */
7977 if ((coproc & 0xe) == 0xa)
7978 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7979 dsc);
7980 else /* coproc is not 101x. */
7981 {
7982 if (bit_4 == 0) /* STC/STC2. */
7983 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7984 "stc/stc2", dsc);
7985 else /* LDC/LDC2 {literal, immediate}. */
7986 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7987 regs, dsc);
7988 }
7989 }
7990 }
7991 else
7992 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7993
7994 return 0;
7995 }
7996
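/* Helper used by the 16-bit and 32-bit ADR copy routines below: seed RD
with the current PC so the copied ADD/SUB computes the PC-relative
address. */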
7997 static void
7998 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7999 struct displaced_step_closure *dsc, int rd)
8000 {
8001 /* ADR Rd, #imm
8002
8003 Rewrite as:
8004
8005 Preparation: Rd <- PC
8006 Insn: ADD Rd, #imm
8007 Cleanup: Null.
8008 */
8009
8010 /* Rd <- PC */
8011 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8012 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
8013 }
8014
8015 static int
8016 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
8017 struct displaced_step_closure *dsc,
8018 int rd, unsigned int imm)
8019 {
8020
8021 /* Encoding T2: ADDS Rd, #imm */
8022 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
8023
8024 install_pc_relative (gdbarch, regs, dsc, rd);
8025
8026 return 0;
8027 }
8028
8029 static int
8030 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
8031 struct regcache *regs,
8032 struct displaced_step_closure *dsc)
8033 {
8034 unsigned int rd = bits (insn, 8, 10);
8035 unsigned int imm8 = bits (insn, 0, 7);
8036
8037 if (debug_displaced)
8038 fprintf_unfiltered (gdb_stdlog,
8039 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8040 rd, imm8, insn);
8041
8042 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8043 }
8044
8045 static int
8046 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
8047 uint16_t insn2, struct regcache *regs,
8048 struct displaced_step_closure *dsc)
8049 {
8050 unsigned int rd = bits (insn2, 8, 11);
8051 /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
8052 extract the raw immediate field rather than computing the immediate
8053 value. When generating the ADD or SUB instruction we can then simply
8054 OR the field into the encoding. */
8055 unsigned int imm_3_8 = insn2 & 0x70ff;
8056 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
8057
8058 if (debug_displaced)
8059 fprintf_unfiltered (gdb_stdlog,
8060 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
8061 rd, imm_i, imm_3_8, insn1, insn2);
8062
8063 if (bit (insn1, 7)) /* ADR encoding T2 (subtract form). */
8064 {
8065 /* Rewrite as SUB Rd, Rd, #imm (SUB immediate, encoding T3). */
8066 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
8067 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8068 }
8069 else /* ADR encoding T3 (add form). */
8070 {
8071 /* Rewrite as ADD Rd, Rd, #imm (ADD immediate, encoding T3). */
8072 dsc->modinsn[0] = (0xf100 | rd | imm_i);
8073 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8074 }
8075 dsc->numinsns = 2;
8076
8077 install_pc_relative (gdbarch, regs, dsc, rd);
8078
8079 return 0;
8080 }
8081
8082 static int
8083 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8084 struct regcache *regs,
8085 struct displaced_step_closure *dsc)
8086 {
8087 unsigned int rt = bits (insn1, 8, 10);
8088 unsigned int pc;
8089 int imm8 = (bits (insn1, 0, 7) << 2);
8090 CORE_ADDR from = dsc->insn_addr;
8091
8092 /* LDR Rd, #imm8
8093
8094 Rewrite as:
8095
8096 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8097
8098 Insn: LDR R0, [R2, R3];
8099 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8100
8101 if (debug_displaced)
8102 fprintf_unfiltered (gdb_stdlog,
8103 "displaced: copying thumb ldr r%d [pc #%d]\n"
8104 , rt, imm8);
8105
8106 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8107 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8108 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8109 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8110 /* The assembler calculates the required value of the offset from the
8111 Align(PC,4) value of this instruction to the label. */
8112 pc = pc & 0xfffffffc;
8113
8114 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8115 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8116
8117 dsc->rd = rt;
8118 dsc->u.ldst.xfersize = 4;
8119 dsc->u.ldst.rn = 0;
8120 dsc->u.ldst.immed = 0;
8121 dsc->u.ldst.writeback = 0;
8122 dsc->u.ldst.restore_r4 = 0;
8123
8124 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8125
8126 dsc->cleanup = &cleanup_load;
8127
8128 return 0;
8129 }
8130
8131 /* Copy Thumb cbnz/cbz instruction. */
8132
8133 static int
8134 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8135 struct regcache *regs,
8136 struct displaced_step_closure *dsc)
8137 {
8138 int non_zero = bit (insn1, 11);
8139 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8140 CORE_ADDR from = dsc->insn_addr;
8141 int rn = bits (insn1, 0, 2);
8142 int rn_val = displaced_read_reg (regs, dsc, rn);
8143
8144 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8145 /* CBNZ and CBZ do not affect the condition flags. If the condition is
8146 true, set it to INST_AL so cleanup_branch knows the branch is taken;
8147 otherwise leave it false and cleanup_branch will do nothing. */
8148 if (dsc->u.branch.cond)
8149 {
8150 dsc->u.branch.cond = INST_AL;
8151 dsc->u.branch.dest = from + 4 + imm5;
8152 }
8153 else
8154 dsc->u.branch.dest = from + 2;
8155
8156 dsc->u.branch.link = 0;
8157 dsc->u.branch.exchange = 0;
8158
8159 if (debug_displaced)
8160 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
8161 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
8162 rn, rn_val, insn1, dsc->u.branch.dest);
8163
8164 dsc->modinsn[0] = THUMB_NOP;
8165
8166 dsc->cleanup = &cleanup_branch;
8167 return 0;
8168 }
8169
8170 /* Copy Table Branch Byte/Halfword. */
8171 static int
8172 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8173 uint16_t insn2, struct regcache *regs,
8174 struct displaced_step_closure *dsc)
8175 {
8176 ULONGEST rn_val, rm_val;
8177 int is_tbh = bit (insn2, 4);
8178 CORE_ADDR halfwords = 0;
8179 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8180
8181 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8182 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8183
8184 if (is_tbh)
8185 {
8186 gdb_byte buf[2];
8187
8188 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8189 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8190 }
8191 else
8192 {
8193 gdb_byte buf[1];
8194
8195 target_read_memory (rn_val + rm_val, buf, 1);
8196 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8197 }
8198
8199 if (debug_displaced)
8200 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
8201 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
8202 (unsigned int) rn_val, (unsigned int) rm_val,
8203 (unsigned int) halfwords);
8204
8205 dsc->u.branch.cond = INST_AL;
8206 dsc->u.branch.link = 0;
8207 dsc->u.branch.exchange = 0;
8208 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8209
8210 dsc->cleanup = &cleanup_branch;
8211
8212 return 0;
8213 }
8214
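/* Cleanup for the full-register-list case of thumb_copy_pop_pc_16bit below:
PC <- r7, r7 <- r8, r8 <- tmp[0]. */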
8215 static void
8216 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8217 struct displaced_step_closure *dsc)
8218 {
8219 /* PC <- r7 */
8220 int val = displaced_read_reg (regs, dsc, 7);
8221 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8222
8223 /* r7 <- r8 */
8224 val = displaced_read_reg (regs, dsc, 8);
8225 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8226
8227 /* r8 <- tmp[0] */
8228 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8229
8230 }
8231
8232 static int
8233 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8234 struct regcache *regs,
8235 struct displaced_step_closure *dsc)
8236 {
8237 dsc->u.block.regmask = insn1 & 0x00ff;
8238
8239 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8240 to :
8241
8242 (1) register list is full, that is, r0-r7 are used.
8243 Prepare: tmp[0] <- r8
8244
8245 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8246 MOV r8, r7; Move value of r7 to r8;
8247 POP {r7}; Store PC value into r7.
8248
8249 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8250
8251 (2) register list is not full, supposing there are N registers in
8252 register list (except PC, 0 <= N <= 7).
8253 Prepare: for each i, 0 - N, tmp[i] <- ri.
8254
8255 POP {r0, r1, ...., rN};
8256
8257 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8258 from tmp[] properly.
8259 */
8260 if (debug_displaced)
8261 fprintf_unfiltered (gdb_stdlog,
8262 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8263 dsc->u.block.regmask, insn1);
8264
8265 if (dsc->u.block.regmask == 0xff)
8266 {
8267 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8268
8269 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8270 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8271 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8272
8273 dsc->numinsns = 3;
8274 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8275 }
8276 else
8277 {
8278 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8279 unsigned int new_regmask, bit = 1;
8280 unsigned int to = 0, from = 0, i, new_rn;
8281
8282 for (i = 0; i < num_in_list + 1; i++)
8283 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8284
8285 new_regmask = (1 << (num_in_list + 1)) - 1;
8286
8287 if (debug_displaced)
8288 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8289 "{..., pc}: original reg list %.4x,"
8290 " modified list %.4x\n"),
8291 (int) dsc->u.block.regmask, new_regmask);
8292
8293 dsc->u.block.regmask |= 0x8000;
8294 dsc->u.block.writeback = 0;
8295 dsc->u.block.cond = INST_AL;
8296
8297 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8298
8299 dsc->cleanup = &cleanup_block_load_pc;
8300 }
8301
8302 return 0;
8303 }
8304
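/* Decode a 16-bit Thumb instruction and copy it into the scratch space for
displaced stepping, modifying it where necessary. */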
8305 static void
8306 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8307 struct regcache *regs,
8308 struct displaced_step_closure *dsc)
8309 {
8310 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8311 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8312 int err = 0;
8313
8314 /* 16-bit thumb instructions. */
8315 switch (op_bit_12_15)
8316 {
8317 /* Shift (immediate), add, subtract, move and compare. */
8318 case 0: case 1: case 2: case 3:
8319 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8320 "shift/add/sub/mov/cmp",
8321 dsc);
8322 break;
8323 case 4:
8324 switch (op_bit_10_11)
8325 {
8326 case 0: /* Data-processing */
8327 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8328 "data-processing",
8329 dsc);
8330 break;
8331 case 1: /* Special data instructions and branch and exchange. */
8332 {
8333 unsigned short op = bits (insn1, 7, 9);
8334 if (op == 6 || op == 7) /* BX or BLX */
8335 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8336 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8337 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8338 else
8339 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8340 dsc);
8341 }
8342 break;
8343 default: /* LDR (literal) */
8344 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8345 }
8346 break;
8347 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8348 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8349 break;
8350 case 10:
8351 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8352 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8353 else /* Generate SP-relative address */
8354 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8355 break;
8356 case 11: /* Misc 16-bit instructions */
8357 {
8358 switch (bits (insn1, 8, 11))
8359 {
8360 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8361 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8362 break;
8363 case 12: case 13: /* POP */
8364 if (bit (insn1, 8)) /* PC is in register list. */
8365 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8366 else
8367 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8368 break;
8369 case 15: /* If-Then, and hints */
8370 if (bits (insn1, 0, 3))
8371 /* If-Then makes up to four following instructions conditional. The
8372 IT instruction itself is not conditional, so handle it as a
8373 common unmodified instruction. */
8374 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8375 dsc);
8376 else
8377 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8378 break;
8379 default:
8380 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8381 }
8382 }
8383 break;
8384 case 12:
8385 if (op_bit_10_11 < 2) /* Store multiple registers */
8386 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8387 else /* Load multiple registers */
8388 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8389 break;
8390 case 13: /* Conditional branch and supervisor call */
8391 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8392 err = thumb_copy_b (gdbarch, insn1, dsc);
8393 else
8394 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8395 break;
8396 case 14: /* Unconditional branch */
8397 err = thumb_copy_b (gdbarch, insn1, dsc);
8398 break;
8399 default:
8400 err = 1;
8401 }
8402
8403 if (err)
8404 internal_error (__FILE__, __LINE__,
8405 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8406 }
8407
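/* Decode 32-bit Thumb load byte/halfword/word and memory hint instructions
(PLD/PLI and the various LDR forms). */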
8408 static int
8409 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8410 uint16_t insn1, uint16_t insn2,
8411 struct regcache *regs,
8412 struct displaced_step_closure *dsc)
8413 {
8414 int rt = bits (insn2, 12, 15);
8415 int rn = bits (insn1, 0, 3);
8416 int op1 = bits (insn1, 7, 8);
8417 int err = 0;
8418
8419 switch (bits (insn1, 5, 6))
8420 {
8421 case 0: /* Load byte and memory hints */
8422 if (rt == 0xf) /* PLD/PLI */
8423 {
8424 if (rn == 0xf)
8425 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8426 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8427 else
8428 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8429 "pli/pld", dsc);
8430 }
8431 else
8432 {
8433 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8434 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8435 1);
8436 else
8437 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8438 "ldrb{reg, immediate}/ldrbt",
8439 dsc);
8440 }
8441
8442 break;
8443 case 1: /* Load halfword and memory hints. */
8444 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8445 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8446 "pld/unalloc memhint", dsc);
8447 else
8448 {
8449 if (rn == 0xf)
8450 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8451 2);
8452 else
8453 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8454 "ldrh/ldrht", dsc);
8455 }
8456 break;
8457 case 2: /* Load word */
8458 {
8459 int insn2_bit_8_11 = bits (insn2, 8, 11);
8460
8461 if (rn == 0xf)
8462 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8463 else if (op1 == 0x1) /* Encoding T3 */
8464 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8465 0, 1);
8466 else /* op1 == 0x0 */
8467 {
8468 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8469 /* LDR (immediate) */
8470 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8471 dsc, bit (insn2, 8), 1);
8472 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8473 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8474 "ldrt", dsc);
8475 else
8476 /* LDR (register) */
8477 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8478 dsc, 0, 0);
8479 }
8480 break;
8481 }
8482 default:
8483 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8484 break;
8485 }
8486 return 0;
8487 }
8488
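/* Decode a 32-bit Thumb instruction and copy it into the scratch space for
displaced stepping, modifying it where necessary. */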
8489 static void
8490 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8491 uint16_t insn2, struct regcache *regs,
8492 struct displaced_step_closure *dsc)
8493 {
8494 int err = 0;
8495 unsigned short op = bit (insn2, 15);
8496 unsigned int op1 = bits (insn1, 11, 12);
8497
8498 switch (op1)
8499 {
8500 case 1:
8501 {
8502 switch (bits (insn1, 9, 10))
8503 {
8504 case 0:
8505 if (bit (insn1, 6))
8506 {
8507 /* Load/store {dual, exclusive}, table branch. */
8508 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8509 && bits (insn2, 5, 7) == 0)
8510 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8511 dsc);
8512 else
8513 /* PC is not allowed to be used in load/store {dual, exclusive}
8514 instructions. */
8515 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8516 "load/store dual/ex", dsc);
8517 }
8518 else /* load/store multiple */
8519 {
8520 switch (bits (insn1, 7, 8))
8521 {
8522 case 0: case 3: /* SRS, RFE */
8523 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8524 "srs/rfe", dsc);
8525 break;
8526 case 1: case 2: /* LDM/STM/PUSH/POP */
8527 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8528 break;
8529 }
8530 }
8531 break;
8532
8533 case 1:
8534 /* Data-processing (shift register). */
8535 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8536 dsc);
8537 break;
8538 default: /* Coprocessor instructions. */
8539 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8540 break;
8541 }
8542 break;
8543 }
8544 case 2: /* op1 = 2 */
8545 if (op) /* Branch and misc control. */
8546 {
8547 if (bit (insn2, 14) /* BLX/BL */
8548 || bit (insn2, 12) /* Unconditional branch */
8549 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8550 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8551 else
8552 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8553 "misc ctrl", dsc);
8554 }
8555 else
8556 {
8557 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8558 {
8559 int op = bits (insn1, 4, 8);
8560 int rn = bits (insn1, 0, 3);
8561 if ((op == 0 || op == 0xa) && rn == 0xf)
8562 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8563 regs, dsc);
8564 else
8565 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8566 "dp/pb", dsc);
8567 }
8568 else /* Data processing (modified immediate) */
8569 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8570 "dp/mi", dsc);
8571 }
8572 break;
8573 case 3: /* op1 = 3 */
8574 switch (bits (insn1, 9, 10))
8575 {
8576 case 0:
8577 if (bit (insn1, 4))
8578 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8579 regs, dsc);
8580 else /* NEON Load/Store and Store single data item */
8581 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8582 "neon elt/struct load/store",
8583 dsc);
8584 break;
8585 case 1: /* op1 = 3, bits (9, 10) == 1 */
8586 switch (bits (insn1, 7, 8))
8587 {
8588 case 0: case 1: /* Data processing (register) */
8589 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8590 "dp(reg)", dsc);
8591 break;
8592 case 2: /* Multiply and absolute difference */
8593 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8594 "mul/mua/diff", dsc);
8595 break;
8596 case 3: /* Long multiply and divide */
8597 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8598 "lmul/lmua", dsc);
8599 break;
8600 }
8601 break;
8602 default: /* Coprocessor instructions */
8603 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8604 break;
8605 }
8606 break;
8607 default:
8608 err = 1;
8609 }
8610
8611 if (err)
8612 internal_error (__FILE__, __LINE__,
8613 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8614
8615 }
8616
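/* Read the Thumb instruction at FROM and prepare DSC for displaced stepping,
dispatching on whether it is a 16-bit or a 32-bit encoding. */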
8617 static void
8618 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8619 CORE_ADDR to, struct regcache *regs,
8620 struct displaced_step_closure *dsc)
8621 {
8622 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8623 uint16_t insn1
8624 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8625
8626 if (debug_displaced)
8627 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8628 "at %.8lx\n", insn1, (unsigned long) from);
8629
8630 dsc->is_thumb = 1;
8631 dsc->insn_size = thumb_insn_size (insn1);
8632 if (thumb_insn_size (insn1) == 4)
8633 {
8634 uint16_t insn2
8635 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8636 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8637 }
8638 else
8639 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8640 }
8641
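/* Prepare DSC for displaced stepping of the instruction at FROM, dispatching
to the Thumb or ARM decoder as appropriate. */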
8642 void
8643 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8644 CORE_ADDR to, struct regcache *regs,
8645 struct displaced_step_closure *dsc)
8646 {
8647 int err = 0;
8648 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8649 uint32_t insn;
8650
8651 /* Most displaced instructions use a 1-instruction scratch space, so set this
8652 here and override below if/when necessary. */
8653 dsc->numinsns = 1;
8654 dsc->insn_addr = from;
8655 dsc->scratch_base = to;
8656 dsc->cleanup = NULL;
8657 dsc->wrote_to_pc = 0;
8658
8659 if (!displaced_in_arm_mode (regs))
8660 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8661
8662 dsc->is_thumb = 0;
8663 dsc->insn_size = 4;
8664 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8665 if (debug_displaced)
8666 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8667 "at %.8lx\n", (unsigned long) insn,
8668 (unsigned long) from);
8669
8670 if ((insn & 0xf0000000) == 0xf0000000)
8671 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8672 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8673 {
8674 case 0x0: case 0x1: case 0x2: case 0x3:
8675 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8676 break;
8677
8678 case 0x4: case 0x5: case 0x6:
8679 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8680 break;
8681
8682 case 0x7:
8683 err = arm_decode_media (gdbarch, insn, dsc);
8684 break;
8685
8686 case 0x8: case 0x9: case 0xa: case 0xb:
8687 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8688 break;
8689
8690 case 0xc: case 0xd: case 0xe: case 0xf:
8691 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8692 break;
8693 }
8694
8695 if (err)
8696 internal_error (__FILE__, __LINE__,
8697 _("arm_process_displaced_insn: Instruction decode error"));
8698 }
8699
8700 /* Actually set up the scratch space for a displaced instruction. */
8701
8702 void
8703 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8704 CORE_ADDR to, struct displaced_step_closure *dsc)
8705 {
8706 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8707 unsigned int i, len, offset;
8708 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8709 int size = dsc->is_thumb ? 2 : 4;
8710 const gdb_byte *bkp_insn;
8711
8712 offset = 0;
8713 /* Poke modified instruction(s). */
8714 for (i = 0; i < dsc->numinsns; i++)
8715 {
8716 if (debug_displaced)
8717 {
8718 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8719 if (size == 4)
8720 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8721 dsc->modinsn[i]);
8722 else if (size == 2)
8723 fprintf_unfiltered (gdb_stdlog, "%.4x",
8724 (unsigned short)dsc->modinsn[i]);
8725
8726 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8727 (unsigned long) to + offset);
8728
8729 }
8730 write_memory_unsigned_integer (to + offset, size,
8731 byte_order_for_code,
8732 dsc->modinsn[i]);
8733 offset += size;
8734 }
8735
8736 /* Choose the correct breakpoint instruction. */
8737 if (dsc->is_thumb)
8738 {
8739 bkp_insn = tdep->thumb_breakpoint;
8740 len = tdep->thumb_breakpoint_size;
8741 }
8742 else
8743 {
8744 bkp_insn = tdep->arm_breakpoint;
8745 len = tdep->arm_breakpoint_size;
8746 }
8747
8748 /* Put breakpoint afterwards. */
8749 write_memory (to + offset, bkp_insn, len);
8750
8751 if (debug_displaced)
8752 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8753 paddress (gdbarch, from), paddress (gdbarch, to));
8754 }
8755
8756 /* Entry point for copying an instruction into scratch space for displaced
8757 stepping. */
8758
8759 struct displaced_step_closure *
8760 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8761 CORE_ADDR from, CORE_ADDR to,
8762 struct regcache *regs)
8763 {
8764 struct displaced_step_closure *dsc
8765 = xmalloc (sizeof (struct displaced_step_closure));
8766 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8767 arm_displaced_init_closure (gdbarch, from, to, dsc);
8768
8769 return dsc;
8770 }
8771
8772 /* Entry point for cleaning things up after a displaced instruction has been
8773 single-stepped. */
8774
8775 void
8776 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8777 struct displaced_step_closure *dsc,
8778 CORE_ADDR from, CORE_ADDR to,
8779 struct regcache *regs)
8780 {
8781 if (dsc->cleanup)
8782 dsc->cleanup (gdbarch, regs, dsc);
8783
8784 if (!dsc->wrote_to_pc)
8785 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8786 dsc->insn_addr + dsc->insn_size);
8787
8788 }
8789
8790 #include "bfd-in2.h"
8791 #include "libcoff.h"
8792
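/* Disassembler callback: print the instruction at MEMADDR, selecting Thumb
or ARM decoding and the appropriate endianness. */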
8793 static int
8794 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8795 {
8796 struct gdbarch *gdbarch = info->application_data;
8797
8798 if (arm_pc_is_thumb (gdbarch, memaddr))
8799 {
8800 static asymbol *asym;
8801 static combined_entry_type ce;
8802 static struct coff_symbol_struct csym;
8803 static struct bfd fake_bfd;
8804 static bfd_target fake_target;
8805
8806 if (csym.native == NULL)
8807 {
8808 /* Create a fake symbol vector containing a Thumb symbol.
8809 This is solely so that the code in print_insn_little_arm()
8810 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8811 the presence of a Thumb symbol and switch to decoding
8812 Thumb instructions. */
8813
8814 fake_target.flavour = bfd_target_coff_flavour;
8815 fake_bfd.xvec = &fake_target;
8816 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8817 csym.native = &ce;
8818 csym.symbol.the_bfd = &fake_bfd;
8819 csym.symbol.name = "fake";
8820 asym = (asymbol *) & csym;
8821 }
8822
8823 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8824 info->symbols = &asym;
8825 }
8826 else
8827 info->symbols = NULL;
8828
8829 if (info->endian == BFD_ENDIAN_BIG)
8830 return print_insn_big_arm (memaddr, info);
8831 else
8832 return print_insn_little_arm (memaddr, info);
8833 }
8834
8835 /* The following define instruction sequences that will cause ARM
8836 CPUs to take an undefined instruction trap. These are used to
8837 signal a breakpoint to GDB.
8838
8839 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
8840 modes. A different instruction is required for each mode. The ARM
8841 CPUs can also be big or little endian. Thus four different
8842 instructions are needed to support all cases.
8843
8844 Note: ARMv4 defines several new instructions that will take the
8845 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8846 not in fact add the new instructions. The new undefined
8847 instructions in ARMv4 are all instructions that had no defined
8848 behaviour in earlier chips. There is no guarantee that they will
8849 raise an exception, but may be treated as NOP's. In practice, it
8850 may only safe to rely on instructions matching:
8851
8852 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8853 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8854 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8855
8856 Even this may only be true if the condition predicate is true. The
8857 following use a condition predicate of ALWAYS so it is always TRUE.
8858
8859 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8860 and NetBSD all use a software interrupt rather than an undefined
8861 instruction to force a trap. This can be handled by the
8862 abi-specific code during establishment of the gdbarch vector. */
8863
8864 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8865 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8866 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8867 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8868
8869 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8870 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8871 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8872 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8873
8874 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8875 the program counter value to determine whether a 16-bit or 32-bit
8876 breakpoint should be used. It returns a pointer to a string of
8877 bytes that encode a breakpoint instruction, stores the length of
8878 the string to *lenptr, and adjusts the program counter (if
8879 necessary) to point to the actual memory location where the
8880 breakpoint should be inserted. */
8881
8882 static const unsigned char *
8883 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8884 {
8885 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8886 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8887
8888 if (arm_pc_is_thumb (gdbarch, *pcptr))
8889 {
8890 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8891
8892 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8893 check whether we are replacing a 32-bit instruction. */
8894 if (tdep->thumb2_breakpoint != NULL)
8895 {
8896 gdb_byte buf[2];
8897 if (target_read_memory (*pcptr, buf, 2) == 0)
8898 {
8899 unsigned short inst1;
8900 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8901 if (thumb_insn_size (inst1) == 4)
8902 {
8903 *lenptr = tdep->thumb2_breakpoint_size;
8904 return tdep->thumb2_breakpoint;
8905 }
8906 }
8907 }
8908
8909 *lenptr = tdep->thumb_breakpoint_size;
8910 return tdep->thumb_breakpoint;
8911 }
8912 else
8913 {
8914 *lenptr = tdep->arm_breakpoint_size;
8915 return tdep->arm_breakpoint;
8916 }
8917 }
8918
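/* Determine the remote protocol breakpoint kind to report for *PCPTR; a
32-bit Thumb-2 breakpoint is reported as kind 3 so that it is not
confused with a 32-bit ARM breakpoint. */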
8919 static void
8920 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8921 int *kindptr)
8922 {
8923 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8924
8925 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8926 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8927 that this is not confused with a 32-bit ARM breakpoint. */
8928 *kindptr = 3;
8929 }
8930
8931 /* Extract from an array REGBUF containing the (raw) register state a
8932 function return value of type TYPE, and copy that, in virtual
8933 format, into VALBUF. */
8934
8935 static void
8936 arm_extract_return_value (struct type *type, struct regcache *regs,
8937 gdb_byte *valbuf)
8938 {
8939 struct gdbarch *gdbarch = get_regcache_arch (regs);
8940 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8941
8942 if (TYPE_CODE_FLT == TYPE_CODE (type))
8943 {
8944 switch (gdbarch_tdep (gdbarch)->fp_model)
8945 {
8946 case ARM_FLOAT_FPA:
8947 {
8948 /* The value is in register F0 in internal format. We need to
8949 extract the raw value and then convert it to the desired
8950 internal type. */
8951 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8952
8953 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8954 convert_from_extended (floatformat_from_type (type), tmpbuf,
8955 valbuf, gdbarch_byte_order (gdbarch));
8956 }
8957 break;
8958
8959 case ARM_FLOAT_SOFT_FPA:
8960 case ARM_FLOAT_SOFT_VFP:
8961 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8962 not using the VFP ABI code. */
8963 case ARM_FLOAT_VFP:
8964 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8965 if (TYPE_LENGTH (type) > 4)
8966 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8967 valbuf + INT_REGISTER_SIZE);
8968 break;
8969
8970 default:
8971 internal_error (__FILE__, __LINE__,
8972 _("arm_extract_return_value: "
8973 "Floating point model not supported"));
8974 break;
8975 }
8976 }
8977 else if (TYPE_CODE (type) == TYPE_CODE_INT
8978 || TYPE_CODE (type) == TYPE_CODE_CHAR
8979 || TYPE_CODE (type) == TYPE_CODE_BOOL
8980 || TYPE_CODE (type) == TYPE_CODE_PTR
8981 || TYPE_CODE (type) == TYPE_CODE_REF
8982 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8983 {
8984 /* If the type is a plain integer, then the access is
8985 straight-forward. Otherwise we have to play around a bit
8986 more. */
8987 int len = TYPE_LENGTH (type);
8988 int regno = ARM_A1_REGNUM;
8989 ULONGEST tmp;
8990
8991 while (len > 0)
8992 {
8993 /* By using store_unsigned_integer we avoid having to do
8994 anything special for small big-endian values. */
8995 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8996 store_unsigned_integer (valbuf,
8997 (len > INT_REGISTER_SIZE
8998 ? INT_REGISTER_SIZE : len),
8999 byte_order, tmp);
9000 len -= INT_REGISTER_SIZE;
9001 valbuf += INT_REGISTER_SIZE;
9002 }
9003 }
9004 else
9005 {
9006 /* For a structure or union the behaviour is as if the value had
9007 been stored to word-aligned memory and then loaded into
9008 registers with 32-bit load instruction(s). */
9009 int len = TYPE_LENGTH (type);
9010 int regno = ARM_A1_REGNUM;
9011 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9012
9013 while (len > 0)
9014 {
9015 regcache_cooked_read (regs, regno++, tmpbuf);
9016 memcpy (valbuf, tmpbuf,
9017 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9018 len -= INT_REGISTER_SIZE;
9019 valbuf += INT_REGISTER_SIZE;
9020 }
9021 }
9022 }
9023
9024
9025 /* Will a function return an aggregate type in memory or in a
9026 register? Return 0 if an aggregate type can be returned in a
9027 register, 1 if it must be returned in memory. */
9028
9029 static int
9030 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
9031 {
9032 int nRc;
9033 enum type_code code;
9034
9035 CHECK_TYPEDEF (type);
9036
9037 /* In the ARM ABI, "integer" like aggregate types are returned in
9038 registers. For an aggregate type to be integer like, its size
9039 must be less than or equal to INT_REGISTER_SIZE and the
9040 offset of each addressable subfield must be zero. Note that bit
9041 fields are not addressable, and all addressable subfields of
9042 unions always start at offset zero.
9043
9044 This function is based on the behaviour of GCC 2.95.1.
9045 See: gcc/arm.c: arm_return_in_memory() for details.
9046
9047 Note: All versions of GCC before GCC 2.95.2 do not set up the
9048 parameters correctly for a function returning the following
9049 structure: struct { float f;}; This should be returned in memory,
9050 not a register. Richard Earnshaw sent me a patch, but I do not
9051 know of any way to detect if a function like the above has been
9052 compiled with the correct calling convention. */
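/* For illustration, under the APCS rules implemented below (assuming
4-byte integer registers): 'struct { char a; }' and
'union { char c; short s; }' are integer like, since every addressable
field sits at offset zero, and so are returned in r0; whereas
'struct { float f; }' (floating-point field) and
'struct { char a; char b; }' (field 'b' at a non-zero offset) must be
returned in memory. */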
9053
9054 /* All aggregate types that won't fit in a register must be returned
9055 in memory. */
9056 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
9057 {
9058 return 1;
9059 }
9060
9061 /* The AAPCS says all aggregates not larger than a word are returned
9062 in a register. */
9063 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
9064 return 0;
9065
9066 /* The only aggregate types that can be returned in a register are
9067 structs and unions. Arrays must be returned in memory. */
9068 code = TYPE_CODE (type);
9069 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
9070 {
9071 return 1;
9072 }
9073
9074 /* Assume all other aggregate types can be returned in a register.
9075 Run a check for structures and unions. */
9076 nRc = 0;
9077
9078 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9079 {
9080 int i;
9081 /* Need to check if this struct/union is "integer" like. For
9082 this to be true, its size must be less than or equal to
9083 INT_REGISTER_SIZE and the offset of each addressable
9084 subfield must be zero. Note that bit fields are not
9085 addressable, and unions always start at offset zero. If any
9086 of the subfields is a floating point type, the struct/union
9087 cannot be an integer type. */
9088
9089 /* For each field in the object, check:
9090 1) Is it FP? --> yes, nRc = 1;
9091 2) Is it addressable (bitpos != 0) and
9092 not packed (bitsize == 0)?
9093 --> yes, nRc = 1
9094 */
9095
9096 for (i = 0; i < TYPE_NFIELDS (type); i++)
9097 {
9098 enum type_code field_type_code;
9099 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
9100 i)));
9101
9102 /* Is it a floating point type field? */
9103 if (field_type_code == TYPE_CODE_FLT)
9104 {
9105 nRc = 1;
9106 break;
9107 }
9108
9109 /* If bitpos != 0, then we have to care about it. */
9110 if (TYPE_FIELD_BITPOS (type, i) != 0)
9111 {
9112 /* Bitfields are not addressable. If the field bitsize is
9113 zero, then the field is not packed. Hence it cannot be
9114 a bitfield or any other packed type. */
9115 if (TYPE_FIELD_BITSIZE (type, i) == 0)
9116 {
9117 nRc = 1;
9118 break;
9119 }
9120 }
9121 }
9122 }
9123
9124 return nRc;
9125 }
9126
9127 /* Write into appropriate registers a function return value of type
9128 TYPE, given in virtual format. */
9129
9130 static void
9131 arm_store_return_value (struct type *type, struct regcache *regs,
9132 const gdb_byte *valbuf)
9133 {
9134 struct gdbarch *gdbarch = get_regcache_arch (regs);
9135 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9136
9137 if (TYPE_CODE (type) == TYPE_CODE_FLT)
9138 {
9139 gdb_byte buf[MAX_REGISTER_SIZE];
9140
9141 switch (gdbarch_tdep (gdbarch)->fp_model)
9142 {
9143 case ARM_FLOAT_FPA:
9144
9145 convert_to_extended (floatformat_from_type (type), buf, valbuf,
9146 gdbarch_byte_order (gdbarch));
9147 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
9148 break;
9149
9150 case ARM_FLOAT_SOFT_FPA:
9151 case ARM_FLOAT_SOFT_VFP:
9152 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9153 not using the VFP ABI code. */
9154 case ARM_FLOAT_VFP:
9155 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
9156 if (TYPE_LENGTH (type) > 4)
9157 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
9158 valbuf + INT_REGISTER_SIZE);
9159 break;
9160
9161 default:
9162 internal_error (__FILE__, __LINE__,
9163 _("arm_store_return_value: Floating "
9164 "point model not supported"));
9165 break;
9166 }
9167 }
9168 else if (TYPE_CODE (type) == TYPE_CODE_INT
9169 || TYPE_CODE (type) == TYPE_CODE_CHAR
9170 || TYPE_CODE (type) == TYPE_CODE_BOOL
9171 || TYPE_CODE (type) == TYPE_CODE_PTR
9172 || TYPE_CODE (type) == TYPE_CODE_REF
9173 || TYPE_CODE (type) == TYPE_CODE_ENUM)
9174 {
9175 if (TYPE_LENGTH (type) <= 4)
9176 {
9177 /* Values of one word or less are zero/sign-extended and
9178 returned in r0. */
9179 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9180 LONGEST val = unpack_long (type, valbuf);
9181
9182 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
9183 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
9184 }
9185 else
9186 {
9187 /* Integral values greater than one word are stored in consecutive
9188 registers starting with r0. This will always be a multiple of
9189 the register size. */
9190 int len = TYPE_LENGTH (type);
9191 int regno = ARM_A1_REGNUM;
9192
9193 while (len > 0)
9194 {
9195 regcache_cooked_write (regs, regno++, valbuf);
9196 len -= INT_REGISTER_SIZE;
9197 valbuf += INT_REGISTER_SIZE;
9198 }
9199 }
9200 }
9201 else
9202 {
9203 /* For a structure or union the behaviour is as if the value had
9204 been stored to word-aligned memory and then loaded into
9205 registers with 32-bit load instruction(s). */
9206 int len = TYPE_LENGTH (type);
9207 int regno = ARM_A1_REGNUM;
9208 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9209
9210 while (len > 0)
9211 {
9212 memcpy (tmpbuf, valbuf,
9213 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9214 regcache_cooked_write (regs, regno++, tmpbuf);
9215 len -= INT_REGISTER_SIZE;
9216 valbuf += INT_REGISTER_SIZE;
9217 }
9218 }
9219 }
9220
9221
9222 /* Handle function return values. */
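/* Illustrative note: under the VFP (hard-float) ABI handled first
below, a homogeneous aggregate such as 'struct { float x, y; }' comes
back in s0 and s1, and 'struct { double d[2]; }' in d0 and d1; other
types fall through to the core-register and struct-convention paths
that follow. */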
9223
9224 static enum return_value_convention
9225 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9226 struct type *valtype, struct regcache *regcache,
9227 gdb_byte *readbuf, const gdb_byte *writebuf)
9228 {
9229 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9230 struct type *func_type = function ? value_type (function) : NULL;
9231 enum arm_vfp_cprc_base_type vfp_base_type;
9232 int vfp_base_count;
9233
9234 if (arm_vfp_abi_for_function (gdbarch, func_type)
9235 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9236 {
9237 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9238 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9239 int i;
9240 for (i = 0; i < vfp_base_count; i++)
9241 {
9242 if (reg_char == 'q')
9243 {
9244 if (writebuf)
9245 arm_neon_quad_write (gdbarch, regcache, i,
9246 writebuf + i * unit_length);
9247
9248 if (readbuf)
9249 arm_neon_quad_read (gdbarch, regcache, i,
9250 readbuf + i * unit_length);
9251 }
9252 else
9253 {
9254 char name_buf[4];
9255 int regnum;
9256
9257 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9258 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9259 strlen (name_buf));
9260 if (writebuf)
9261 regcache_cooked_write (regcache, regnum,
9262 writebuf + i * unit_length);
9263 if (readbuf)
9264 regcache_cooked_read (regcache, regnum,
9265 readbuf + i * unit_length);
9266 }
9267 }
9268 return RETURN_VALUE_REGISTER_CONVENTION;
9269 }
9270
9271 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9272 || TYPE_CODE (valtype) == TYPE_CODE_UNION
9273 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9274 {
9275 if (tdep->struct_return == pcc_struct_return
9276 || arm_return_in_memory (gdbarch, valtype))
9277 return RETURN_VALUE_STRUCT_CONVENTION;
9278 }
9279
9280 /* AAPCS returns complex types longer than a register in memory. */
9281 if (tdep->arm_abi != ARM_ABI_APCS
9282 && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
9283 && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
9284 return RETURN_VALUE_STRUCT_CONVENTION;
9285
9286 if (writebuf)
9287 arm_store_return_value (valtype, regcache, writebuf);
9288
9289 if (readbuf)
9290 arm_extract_return_value (valtype, regcache, readbuf);
9291
9292 return RETURN_VALUE_REGISTER_CONVENTION;
9293 }
9294
9295
9296 static int
9297 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9298 {
9299 struct gdbarch *gdbarch = get_frame_arch (frame);
9300 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9301 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9302 CORE_ADDR jb_addr;
9303 gdb_byte buf[INT_REGISTER_SIZE];
9304
9305 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9306
9307 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9308 INT_REGISTER_SIZE))
9309 return 0;
9310
9311 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9312 return 1;
9313 }
9314
9315 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9316 return the target PC. Otherwise return 0. */
9317
9318 CORE_ADDR
9319 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9320 {
9321 const char *name;
9322 int namelen;
9323 CORE_ADDR start_addr;
9324
9325 /* Find the starting address and name of the function containing the PC. */
9326 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9327 {
9328 /* Trampoline 'bx reg' doesn't belong to any function. Do the
9329 check here. */
9330 start_addr = arm_skip_bx_reg (frame, pc);
9331 if (start_addr != 0)
9332 return start_addr;
9333
9334 return 0;
9335 }
9336
9337 /* If PC is in a Thumb call or return stub, return the address of the
9338 target PC, which is in a register. The thunk functions are called
9339 _call_via_xx, where x is the register name. The possible names
9340 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9341 functions, named __ARM_call_via_r[0-7]. */
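/* For example, an interworking call through r3 is routed via a thunk
named "_call_via_r3"; the lookup below strips the two-character
register suffix and returns the current value of r3 as the target
PC. */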
9342 if (strncmp (name, "_call_via_", 10) == 0
9343 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
9344 {
9345 /* Use the name suffix to determine which register contains the
9346 target PC. */
9347 static char *table[15] =
9348 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9349 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9350 };
9351 int regno;
9352 int offset = strlen (name) - 2;
9353
9354 for (regno = 0; regno <= 14; regno++)
9355 if (strcmp (&name[offset], table[regno]) == 0)
9356 return get_frame_register_unsigned (frame, regno);
9357 }
9358
9359 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9360 non-interworking calls to foo. We could decode the stubs
9361 to find the target but it's easier to use the symbol table. */
9362 namelen = strlen (name);
9363 if (name[0] == '_' && name[1] == '_'
9364 && ((namelen > 2 + strlen ("_from_thumb")
9365 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9366 strlen ("_from_thumb")) == 0)
9367 || (namelen > 2 + strlen ("_from_arm")
9368 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9369 strlen ("_from_arm")) == 0)))
9370 {
9371 char *target_name;
9372 int target_len = namelen - 2;
9373 struct bound_minimal_symbol minsym;
9374 struct objfile *objfile;
9375 struct obj_section *sec;
9376
9377 if (name[namelen - 1] == 'b')
9378 target_len -= strlen ("_from_thumb");
9379 else
9380 target_len -= strlen ("_from_arm");
9381
9382 target_name = alloca (target_len + 1);
9383 memcpy (target_name, name + 2, target_len);
9384 target_name[target_len] = '\0';
9385
9386 sec = find_pc_section (pc);
9387 objfile = (sec == NULL) ? NULL : sec->objfile;
9388 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9389 if (minsym.minsym != NULL)
9390 return BMSYMBOL_VALUE_ADDRESS (minsym);
9391 else
9392 return 0;
9393 }
9394
9395 return 0; /* Not a stub. */
9396 }
9397
9398 static void
9399 set_arm_command (char *args, int from_tty)
9400 {
9401 printf_unfiltered (_("\
9402 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9403 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9404 }
9405
9406 static void
9407 show_arm_command (char *args, int from_tty)
9408 {
9409 cmd_show_list (showarmcmdlist, from_tty, "");
9410 }
9411
9412 static void
9413 arm_update_current_architecture (void)
9414 {
9415 struct gdbarch_info info;
9416
9417 /* If the current architecture is not ARM, we have nothing to do. */
9418 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9419 return;
9420
9421 /* Update the architecture. */
9422 gdbarch_info_init (&info);
9423
9424 if (!gdbarch_update_p (info))
9425 internal_error (__FILE__, __LINE__, _("could not update architecture"));
9426 }
9427
9428 static void
9429 set_fp_model_sfunc (char *args, int from_tty,
9430 struct cmd_list_element *c)
9431 {
9432 enum arm_float_model fp_model;
9433
9434 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9435 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9436 {
9437 arm_fp_model = fp_model;
9438 break;
9439 }
9440
9441 if (fp_model == ARM_FLOAT_LAST)
9442 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9443 current_fp_model);
9444
9445 arm_update_current_architecture ();
9446 }
9447
9448 static void
9449 show_fp_model (struct ui_file *file, int from_tty,
9450 struct cmd_list_element *c, const char *value)
9451 {
9452 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9453
9454 if (arm_fp_model == ARM_FLOAT_AUTO
9455 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9456 fprintf_filtered (file, _("\
9457 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9458 fp_model_strings[tdep->fp_model]);
9459 else
9460 fprintf_filtered (file, _("\
9461 The current ARM floating point model is \"%s\".\n"),
9462 fp_model_strings[arm_fp_model]);
9463 }
9464
9465 static void
9466 arm_set_abi (char *args, int from_tty,
9467 struct cmd_list_element *c)
9468 {
9469 enum arm_abi_kind arm_abi;
9470
9471 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9472 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9473 {
9474 arm_abi_global = arm_abi;
9475 break;
9476 }
9477
9478 if (arm_abi == ARM_ABI_LAST)
9479 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9480 arm_abi_string);
9481
9482 arm_update_current_architecture ();
9483 }
9484
9485 static void
9486 arm_show_abi (struct ui_file *file, int from_tty,
9487 struct cmd_list_element *c, const char *value)
9488 {
9489 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9490
9491 if (arm_abi_global == ARM_ABI_AUTO
9492 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9493 fprintf_filtered (file, _("\
9494 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9495 arm_abi_strings[tdep->arm_abi]);
9496 else
9497 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9498 arm_abi_string);
9499 }
9500
9501 static void
9502 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9503 struct cmd_list_element *c, const char *value)
9504 {
9505 fprintf_filtered (file,
9506 _("The current execution mode assumed "
9507 "(when symbols are unavailable) is \"%s\".\n"),
9508 arm_fallback_mode_string);
9509 }
9510
9511 static void
9512 arm_show_force_mode (struct ui_file *file, int from_tty,
9513 struct cmd_list_element *c, const char *value)
9514 {
9515 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9516
9517 fprintf_filtered (file,
9518 _("The current execution mode assumed "
9519 "(even when symbols are available) is \"%s\".\n"),
9520 arm_force_mode_string);
9521 }
9522
9523 /* If the user changes the register disassembly style used for info
9524 register and other commands, we have to also switch the style used
9525 in opcodes for disassembly output. This function is run in the "set
9526 arm disassembly" command, and does that. */
9527
9528 static void
9529 set_disassembly_style_sfunc (char *args, int from_tty,
9530 struct cmd_list_element *c)
9531 {
9532 set_disassembly_style ();
9533 }
9534 \f
9535 /* Return the ARM register name corresponding to register I. */
9536 static const char *
9537 arm_register_name (struct gdbarch *gdbarch, int i)
9538 {
9539 const int num_regs = gdbarch_num_regs (gdbarch);
9540
9541 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9542 && i >= num_regs && i < num_regs + 32)
9543 {
9544 static const char *const vfp_pseudo_names[] = {
9545 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9546 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9547 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9548 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9549 };
9550
9551 return vfp_pseudo_names[i - num_regs];
9552 }
9553
9554 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9555 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9556 {
9557 static const char *const neon_pseudo_names[] = {
9558 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9559 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9560 };
9561
9562 return neon_pseudo_names[i - num_regs - 32];
9563 }
9564
9565 if (i >= ARRAY_SIZE (arm_register_names))
9566 /* These registers are only supported on targets which supply
9567 an XML description. */
9568 return "";
9569
9570 return arm_register_names[i];
9571 }
9572
9573 static void
9574 set_disassembly_style (void)
9575 {
9576 int current;
9577
9578 /* Find the style that the user wants. */
9579 for (current = 0; current < num_disassembly_options; current++)
9580 if (disassembly_style == valid_disassembly_styles[current])
9581 break;
9582 gdb_assert (current < num_disassembly_options);
9583
9584 /* Synchronize the disassembler. */
9585 set_arm_regname_option (current);
9586 }
9587
9588 /* Test whether the coff symbol specific value corresponds to a Thumb
9589 function. */
9590
9591 static int
9592 coff_sym_is_thumb (int val)
9593 {
9594 return (val == C_THUMBEXT
9595 || val == C_THUMBSTAT
9596 || val == C_THUMBEXTFUNC
9597 || val == C_THUMBSTATFUNC
9598 || val == C_THUMBLABEL);
9599 }
9600
9601 /* arm_coff_make_msymbol_special()
9602 arm_elf_make_msymbol_special()
9603
9604 These functions test whether the COFF or ELF symbol corresponds to
9605 an address in thumb code, and set a "special" bit in a minimal
9606 symbol to indicate that it does. */
9607
9608 static void
9609 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9610 {
9611 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9612 == ST_BRANCH_TO_THUMB)
9613 MSYMBOL_SET_SPECIAL (msym);
9614 }
9615
9616 static void
9617 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9618 {
9619 if (coff_sym_is_thumb (val))
9620 MSYMBOL_SET_SPECIAL (msym);
9621 }
9622
9623 static void
9624 arm_objfile_data_free (struct objfile *objfile, void *arg)
9625 {
9626 struct arm_per_objfile *data = arg;
9627 unsigned int i;
9628
9629 for (i = 0; i < objfile->obfd->section_count; i++)
9630 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
9631 }
9632
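/* Record an ARM ELF mapping symbol so that the per-section tables
built here can be searched later when classifying addresses. As a
descriptive note: "$a" marks the start of a run of ARM code, "$t"
Thumb code, and "$d" literal data; any other "$..." symbol is ignored
below. */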
9633 static void
9634 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9635 asymbol *sym)
9636 {
9637 const char *name = bfd_asymbol_name (sym);
9638 struct arm_per_objfile *data;
9639 VEC(arm_mapping_symbol_s) **map_p;
9640 struct arm_mapping_symbol new_map_sym;
9641
9642 gdb_assert (name[0] == '$');
9643 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9644 return;
9645
9646 data = objfile_data (objfile, arm_objfile_data_key);
9647 if (data == NULL)
9648 {
9649 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9650 struct arm_per_objfile);
9651 set_objfile_data (objfile, arm_objfile_data_key, data);
9652 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9653 objfile->obfd->section_count,
9654 VEC(arm_mapping_symbol_s) *);
9655 }
9656 map_p = &data->section_maps[bfd_get_section (sym)->index];
9657
9658 new_map_sym.value = sym->value;
9659 new_map_sym.type = name[1];
9660
9661 /* Assume that most mapping symbols appear in order of increasing
9662 value. If they were randomly distributed, it would be faster to
9663 always push here and then sort at first use. */
9664 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9665 {
9666 struct arm_mapping_symbol *prev_map_sym;
9667
9668 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9669 if (prev_map_sym->value >= sym->value)
9670 {
9671 unsigned int idx;
9672 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9673 arm_compare_mapping_symbols);
9674 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9675 return;
9676 }
9677 }
9678
9679 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
9680 }
9681
9682 static void
9683 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9684 {
9685 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9686 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9687
9688 /* If necessary, set the T bit. */
9689 if (arm_apcs_32)
9690 {
9691 ULONGEST val, t_bit;
9692 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9693 t_bit = arm_psr_thumb_bit (gdbarch);
9694 if (arm_pc_is_thumb (gdbarch, pc))
9695 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9696 val | t_bit);
9697 else
9698 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9699 val & ~t_bit);
9700 }
9701 }
9702
9703 /* Read the contents of a NEON quad register, by reading from two
9704 double registers. This is used to implement the quad pseudo
9705 registers, and for argument passing in case the quad registers are
9706 missing; vectors are passed in quad registers when using the VFP
9707 ABI, even if a NEON unit is not present. REGNUM is the index of
9708 the quad register, in [0, 15]. */
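/* For illustration: quad register qN overlays double registers d(2N)
and d(2N+1), so q1 is assembled from d2 and d3. On a little-endian
target d2 supplies bytes 0-7 of the result and d3 bytes 8-15; the two
halves are swapped on a big-endian target, as the offset logic below
arranges. */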
9709
9710 static enum register_status
9711 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9712 int regnum, gdb_byte *buf)
9713 {
9714 char name_buf[4];
9715 gdb_byte reg_buf[8];
9716 int offset, double_regnum;
9717 enum register_status status;
9718
9719 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9720 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9721 strlen (name_buf));
9722
9723 /* d0 is always the least significant half of q0. */
9724 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9725 offset = 8;
9726 else
9727 offset = 0;
9728
9729 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9730 if (status != REG_VALID)
9731 return status;
9732 memcpy (buf + offset, reg_buf, 8);
9733
9734 offset = 8 - offset;
9735 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9736 if (status != REG_VALID)
9737 return status;
9738 memcpy (buf + offset, reg_buf, 8);
9739
9740 return REG_VALID;
9741 }
9742
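/* Read pseudo register REGNUM: either a NEON quad register, handled
by arm_neon_quad_read above, or a single-precision VFP register.
Single register sN is one half of double register d(N/2); for example
s5 occupies bytes 4-7 of d2's raw contents on a little-endian target,
so only those four bytes are copied out. */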
9743 static enum register_status
9744 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9745 int regnum, gdb_byte *buf)
9746 {
9747 const int num_regs = gdbarch_num_regs (gdbarch);
9748 char name_buf[4];
9749 gdb_byte reg_buf[8];
9750 int offset, double_regnum;
9751
9752 gdb_assert (regnum >= num_regs);
9753 regnum -= num_regs;
9754
9755 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9756 /* Quad-precision register. */
9757 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9758 else
9759 {
9760 enum register_status status;
9761
9762 /* Single-precision register. */
9763 gdb_assert (regnum < 32);
9764
9765 /* s0 is always the least significant half of d0. */
9766 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9767 offset = (regnum & 1) ? 0 : 4;
9768 else
9769 offset = (regnum & 1) ? 4 : 0;
9770
9771 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9772 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9773 strlen (name_buf));
9774
9775 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9776 if (status == REG_VALID)
9777 memcpy (buf, reg_buf + offset, 4);
9778 return status;
9779 }
9780 }
9781
9782 /* Store the contents of BUF to a NEON quad register, by writing to
9783 two double registers. This is used to implement the quad pseudo
9784 registers, and for argument passing in case the quad registers are
9785 missing; vectors are passed in quad registers when using the VFP
9786 ABI, even if a NEON unit is not present. REGNUM is the index
9787 of the quad register, in [0, 15]. */
9788
9789 static void
9790 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9791 int regnum, const gdb_byte *buf)
9792 {
9793 char name_buf[4];
9794 int offset, double_regnum;
9795
9796 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9797 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9798 strlen (name_buf));
9799
9800 /* d0 is always the least significant half of q0. */
9801 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9802 offset = 8;
9803 else
9804 offset = 0;
9805
9806 regcache_raw_write (regcache, double_regnum, buf + offset);
9807 offset = 8 - offset;
9808 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9809 }
9810
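/* Write pseudo register REGNUM; the mirror image of arm_pseudo_read
above. For a single-precision register the containing double register
is read back first, so that only the relevant four bytes are modified
before the whole d register is written out again. */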
9811 static void
9812 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9813 int regnum, const gdb_byte *buf)
9814 {
9815 const int num_regs = gdbarch_num_regs (gdbarch);
9816 char name_buf[4];
9817 gdb_byte reg_buf[8];
9818 int offset, double_regnum;
9819
9820 gdb_assert (regnum >= num_regs);
9821 regnum -= num_regs;
9822
9823 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9824 /* Quad-precision register. */
9825 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9826 else
9827 {
9828 /* Single-precision register. */
9829 gdb_assert (regnum < 32);
9830
9831 /* s0 is always the least significant half of d0. */
9832 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9833 offset = (regnum & 1) ? 0 : 4;
9834 else
9835 offset = (regnum & 1) ? 4 : 0;
9836
9837 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9838 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9839 strlen (name_buf));
9840
9841 regcache_raw_read (regcache, double_regnum, reg_buf);
9842 memcpy (reg_buf + offset, buf, 4);
9843 regcache_raw_write (regcache, double_regnum, reg_buf);
9844 }
9845 }
9846
9847 static struct value *
9848 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9849 {
9850 const int *reg_p = baton;
9851 return value_of_register (*reg_p, frame);
9852 }
9853 \f
9854 static enum gdb_osabi
9855 arm_elf_osabi_sniffer (bfd *abfd)
9856 {
9857 unsigned int elfosabi;
9858 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9859
9860 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9861
9862 if (elfosabi == ELFOSABI_ARM)
9863 /* GNU tools use this value. Check note sections in this case,
9864 as well. */
9865 bfd_map_over_sections (abfd,
9866 generic_elf_osabi_sniff_abi_tag_sections,
9867 &osabi);
9868
9869 /* Anything else will be handled by the generic ELF sniffer. */
9870 return osabi;
9871 }
9872
9873 static int
9874 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9875 struct reggroup *group)
9876 {
9877 /* The FPS register's type is INT, but it belongs to float_reggroup.
9878 Besides this, the FPS register belongs to save_reggroup,
9879 restore_reggroup, and all_reggroup, of course. */
9880 if (regnum == ARM_FPS_REGNUM)
9881 return (group == float_reggroup
9882 || group == save_reggroup
9883 || group == restore_reggroup
9884 || group == all_reggroup);
9885 else
9886 return default_register_reggroup_p (gdbarch, regnum, group);
9887 }
9888
9889 \f
9890 /* For backward-compatibility we allow two 'g' packet lengths with
9891 the remote protocol depending on whether FPA registers are
9892 supplied. M-profile targets do not have FPA registers, but some
9893 stubs already exist in the wild which use a 'g' packet which
9894 supplies them albeit with dummy values. The packet format which
9895 includes FPA registers should be considered deprecated for
9896 M-profile targets. */
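/* Worked sizes for the guesses below, assuming the usual register
widths (INT_REGISTER_SIZE = 4, FP_REGISTER_SIZE = 12,
VFP_REGISTER_SIZE = 8): the FPA-style layout is 16*4 + 8*12 + 2*4
= 168 bytes, the plain M-profile layout 17*4 = 68 bytes, and the M4F
VFP layout 16*4 + 16*8 + 2*4 = 200 bytes of 'g' packet payload. */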
9897
9898 static void
9899 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9900 {
9901 if (gdbarch_tdep (gdbarch)->is_m)
9902 {
9903 /* If we know from the executable this is an M-profile target,
9904 cater for remote targets whose register set layout is the
9905 same as the FPA layout. */
9906 register_remote_g_packet_guess (gdbarch,
9907 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9908 (16 * INT_REGISTER_SIZE)
9909 + (8 * FP_REGISTER_SIZE)
9910 + (2 * INT_REGISTER_SIZE),
9911 tdesc_arm_with_m_fpa_layout);
9912
9913 /* The regular M-profile layout. */
9914 register_remote_g_packet_guess (gdbarch,
9915 /* r0-r12,sp,lr,pc; xpsr */
9916 (16 * INT_REGISTER_SIZE)
9917 + INT_REGISTER_SIZE,
9918 tdesc_arm_with_m);
9919
9920 /* M-profile plus M4F VFP. */
9921 register_remote_g_packet_guess (gdbarch,
9922 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9923 (16 * INT_REGISTER_SIZE)
9924 + (16 * VFP_REGISTER_SIZE)
9925 + (2 * INT_REGISTER_SIZE),
9926 tdesc_arm_with_m_vfp_d16);
9927 }
9928
9929 /* Otherwise we don't have a useful guess. */
9930 }
9931
9932 \f
9933 /* Initialize the current architecture based on INFO. If possible,
9934 re-use an architecture from ARCHES, which is a list of
9935 architectures already created during this debugging session.
9936
9937 Called e.g. at program startup, when reading a core file, and when
9938 reading a binary file. */
9939
9940 static struct gdbarch *
9941 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9942 {
9943 struct gdbarch_tdep *tdep;
9944 struct gdbarch *gdbarch;
9945 struct gdbarch_list *best_arch;
9946 enum arm_abi_kind arm_abi = arm_abi_global;
9947 enum arm_float_model fp_model = arm_fp_model;
9948 struct tdesc_arch_data *tdesc_data = NULL;
9949 int i, is_m = 0;
9950 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9951 int have_neon = 0;
9952 int have_fpa_registers = 1;
9953 const struct target_desc *tdesc = info.target_desc;
9954
9955 /* If we have an object to base this architecture on, try to determine
9956 its ABI. */
9957
9958 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9959 {
9960 int ei_osabi, e_flags;
9961
9962 switch (bfd_get_flavour (info.abfd))
9963 {
9964 case bfd_target_aout_flavour:
9965 /* Assume it's an old APCS-style ABI. */
9966 arm_abi = ARM_ABI_APCS;
9967 break;
9968
9969 case bfd_target_coff_flavour:
9970 /* Assume it's an old APCS-style ABI. */
9971 /* XXX WinCE? */
9972 arm_abi = ARM_ABI_APCS;
9973 break;
9974
9975 case bfd_target_elf_flavour:
9976 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9977 e_flags = elf_elfheader (info.abfd)->e_flags;
9978
9979 if (ei_osabi == ELFOSABI_ARM)
9980 {
9981 /* GNU tools used to use this value, but do not for EABI
9982 objects. There's nowhere to tag an EABI version
9983 anyway, so assume APCS. */
9984 arm_abi = ARM_ABI_APCS;
9985 }
9986 else if (ei_osabi == ELFOSABI_NONE)
9987 {
9988 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9989 int attr_arch, attr_profile;
9990
9991 switch (eabi_ver)
9992 {
9993 case EF_ARM_EABI_UNKNOWN:
9994 /* Assume GNU tools. */
9995 arm_abi = ARM_ABI_APCS;
9996 break;
9997
9998 case EF_ARM_EABI_VER4:
9999 case EF_ARM_EABI_VER5:
10000 arm_abi = ARM_ABI_AAPCS;
10001 /* EABI binaries default to VFP float ordering.
10002 They may also contain build attributes that can
10003 be used to identify if the VFP argument-passing
10004 ABI is in use. */
10005 if (fp_model == ARM_FLOAT_AUTO)
10006 {
10007 #ifdef HAVE_ELF
10008 switch (bfd_elf_get_obj_attr_int (info.abfd,
10009 OBJ_ATTR_PROC,
10010 Tag_ABI_VFP_args))
10011 {
10012 case 0:
10013 /* "The user intended FP parameter/result
10014 passing to conform to AAPCS, base
10015 variant". */
10016 fp_model = ARM_FLOAT_SOFT_VFP;
10017 break;
10018 case 1:
10019 /* "The user intended FP parameter/result
10020 passing to conform to AAPCS, VFP
10021 variant". */
10022 fp_model = ARM_FLOAT_VFP;
10023 break;
10024 case 2:
10025 /* "The user intended FP parameter/result
10026 passing to conform to tool chain-specific
10027 conventions" - we don't know any such
10028 conventions, so leave it as "auto". */
10029 break;
10030 default:
10031 /* Attribute value not mentioned in the
10032 October 2008 ABI, so leave it as
10033 "auto". */
10034 break;
10035 }
10036 #else
10037 fp_model = ARM_FLOAT_SOFT_VFP;
10038 #endif
10039 }
10040 break;
10041
10042 default:
10043 /* Leave it as "auto". */
10044 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10045 break;
10046 }
10047
10048 #ifdef HAVE_ELF
10049 /* Detect M-profile programs. This only works if the
10050 executable file includes build attributes; GCC does
10051 copy them to the executable, but e.g. RealView does
10052 not. */
10053 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10054 Tag_CPU_arch);
10055 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10056 OBJ_ATTR_PROC,
10057 Tag_CPU_arch_profile);
10058 /* GCC specifies the profile for v6-M; RealView only
10059 specifies the profile for architectures starting with
10060 V7 (as opposed to architectures with a tag
10061 numerically greater than TAG_CPU_ARCH_V7). */
10062 if (!tdesc_has_registers (tdesc)
10063 && (attr_arch == TAG_CPU_ARCH_V6_M
10064 || attr_arch == TAG_CPU_ARCH_V6S_M
10065 || attr_profile == 'M'))
10066 is_m = 1;
10067 #endif
10068 }
10069
10070 if (fp_model == ARM_FLOAT_AUTO)
10071 {
10072 int e_flags = elf_elfheader (info.abfd)->e_flags;
10073
10074 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10075 {
10076 case 0:
10077 /* Leave it as "auto". Strictly speaking this case
10078 means FPA, but almost nobody uses that now, and
10079 many toolchains fail to set the appropriate bits
10080 for the floating-point model they use. */
10081 break;
10082 case EF_ARM_SOFT_FLOAT:
10083 fp_model = ARM_FLOAT_SOFT_FPA;
10084 break;
10085 case EF_ARM_VFP_FLOAT:
10086 fp_model = ARM_FLOAT_VFP;
10087 break;
10088 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10089 fp_model = ARM_FLOAT_SOFT_VFP;
10090 break;
10091 }
10092 }
10093
10094 if (e_flags & EF_ARM_BE8)
10095 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10096
10097 break;
10098
10099 default:
10100 /* Leave it as "auto". */
10101 break;
10102 }
10103 }
10104
10105 /* Check any target description for validity. */
10106 if (tdesc_has_registers (tdesc))
10107 {
10108 /* For most registers we require GDB's default names; but also allow
10109 the numeric names for sp / lr / pc, as a convenience. */
10110 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10111 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10112 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10113
10114 const struct tdesc_feature *feature;
10115 int valid_p;
10116
10117 feature = tdesc_find_feature (tdesc,
10118 "org.gnu.gdb.arm.core");
10119 if (feature == NULL)
10120 {
10121 feature = tdesc_find_feature (tdesc,
10122 "org.gnu.gdb.arm.m-profile");
10123 if (feature == NULL)
10124 return NULL;
10125 else
10126 is_m = 1;
10127 }
10128
10129 tdesc_data = tdesc_data_alloc ();
10130
10131 valid_p = 1;
10132 for (i = 0; i < ARM_SP_REGNUM; i++)
10133 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10134 arm_register_names[i]);
10135 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10136 ARM_SP_REGNUM,
10137 arm_sp_names);
10138 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10139 ARM_LR_REGNUM,
10140 arm_lr_names);
10141 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10142 ARM_PC_REGNUM,
10143 arm_pc_names);
10144 if (is_m)
10145 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10146 ARM_PS_REGNUM, "xpsr");
10147 else
10148 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10149 ARM_PS_REGNUM, "cpsr");
10150
10151 if (!valid_p)
10152 {
10153 tdesc_data_cleanup (tdesc_data);
10154 return NULL;
10155 }
10156
10157 feature = tdesc_find_feature (tdesc,
10158 "org.gnu.gdb.arm.fpa");
10159 if (feature != NULL)
10160 {
10161 valid_p = 1;
10162 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10163 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10164 arm_register_names[i]);
10165 if (!valid_p)
10166 {
10167 tdesc_data_cleanup (tdesc_data);
10168 return NULL;
10169 }
10170 }
10171 else
10172 have_fpa_registers = 0;
10173
10174 feature = tdesc_find_feature (tdesc,
10175 "org.gnu.gdb.xscale.iwmmxt");
10176 if (feature != NULL)
10177 {
10178 static const char *const iwmmxt_names[] = {
10179 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10180 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10181 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10182 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10183 };
10184
10185 valid_p = 1;
10186 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10187 valid_p
10188 &= tdesc_numbered_register (feature, tdesc_data, i,
10189 iwmmxt_names[i - ARM_WR0_REGNUM]);
10190
10191 /* Check for the control registers, but do not fail if they
10192 are missing. */
10193 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10194 tdesc_numbered_register (feature, tdesc_data, i,
10195 iwmmxt_names[i - ARM_WR0_REGNUM]);
10196
10197 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10198 valid_p
10199 &= tdesc_numbered_register (feature, tdesc_data, i,
10200 iwmmxt_names[i - ARM_WR0_REGNUM]);
10201
10202 if (!valid_p)
10203 {
10204 tdesc_data_cleanup (tdesc_data);
10205 return NULL;
10206 }
10207 }
10208
10209 /* If we have a VFP unit, check whether the single precision registers
10210 are present. If not, then we will synthesize them as pseudo
10211 registers. */
10212 feature = tdesc_find_feature (tdesc,
10213 "org.gnu.gdb.arm.vfp");
10214 if (feature != NULL)
10215 {
10216 static const char *const vfp_double_names[] = {
10217 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10218 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10219 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10220 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10221 };
10222
10223 /* Require the double precision registers. There must be either
10224 16 or 32. */
10225 valid_p = 1;
10226 for (i = 0; i < 32; i++)
10227 {
10228 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10229 ARM_D0_REGNUM + i,
10230 vfp_double_names[i]);
10231 if (!valid_p)
10232 break;
10233 }
10234 if (!valid_p && i == 16)
10235 valid_p = 1;
10236
10237 /* Also require FPSCR. */
10238 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10239 ARM_FPSCR_REGNUM, "fpscr");
10240 if (!valid_p)
10241 {
10242 tdesc_data_cleanup (tdesc_data);
10243 return NULL;
10244 }
10245
10246 if (tdesc_unnumbered_register (feature, "s0") == 0)
10247 have_vfp_pseudos = 1;
10248
10249 have_vfp_registers = 1;
10250
10251 /* If we have VFP, also check for NEON. The architecture allows
10252 NEON without VFP (integer vector operations only), but GDB
10253 does not support that. */
10254 feature = tdesc_find_feature (tdesc,
10255 "org.gnu.gdb.arm.neon");
10256 if (feature != NULL)
10257 {
10258 /* NEON requires 32 double-precision registers. */
10259 if (i != 32)
10260 {
10261 tdesc_data_cleanup (tdesc_data);
10262 return NULL;
10263 }
10264
10265 /* If there are quad registers defined by the stub, use
10266 their type; otherwise (normally) provide them with
10267 the default type. */
10268 if (tdesc_unnumbered_register (feature, "q0") == 0)
10269 have_neon_pseudos = 1;
10270
10271 have_neon = 1;
10272 }
10273 }
10274 }
10275
10276 /* If there is already a candidate, use it. */
10277 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10278 best_arch != NULL;
10279 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10280 {
10281 if (arm_abi != ARM_ABI_AUTO
10282 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10283 continue;
10284
10285 if (fp_model != ARM_FLOAT_AUTO
10286 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10287 continue;
10288
10289 /* There are various other properties in tdep that we do not
10290 need to check here: those derived from a target description,
10291 since gdbarches with a different target description are
10292 automatically disqualified. */
10293
10294 /* Do check is_m, though, since it might come from the binary. */
10295 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10296 continue;
10297
10298 /* Found a match. */
10299 break;
10300 }
10301
10302 if (best_arch != NULL)
10303 {
10304 if (tdesc_data != NULL)
10305 tdesc_data_cleanup (tdesc_data);
10306 return best_arch->gdbarch;
10307 }
10308
10309 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
10310 gdbarch = gdbarch_alloc (&info, tdep);
10311
10312 /* Record additional information about the architecture we are defining.
10313 These are gdbarch discriminators, like the OSABI. */
10314 tdep->arm_abi = arm_abi;
10315 tdep->fp_model = fp_model;
10316 tdep->is_m = is_m;
10317 tdep->have_fpa_registers = have_fpa_registers;
10318 tdep->have_vfp_registers = have_vfp_registers;
10319 tdep->have_vfp_pseudos = have_vfp_pseudos;
10320 tdep->have_neon_pseudos = have_neon_pseudos;
10321 tdep->have_neon = have_neon;
10322
10323 arm_register_g_packet_guesses (gdbarch);
10324
10325 /* Breakpoints. */
10326 switch (info.byte_order_for_code)
10327 {
10328 case BFD_ENDIAN_BIG:
10329 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10330 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10331 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10332 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10333
10334 break;
10335
10336 case BFD_ENDIAN_LITTLE:
10337 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10338 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10339 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10340 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10341
10342 break;
10343
10344 default:
10345 internal_error (__FILE__, __LINE__,
10346 _("arm_gdbarch_init: bad byte order for float format"));
10347 }
10348
10349 /* On ARM targets char defaults to unsigned. */
10350 set_gdbarch_char_signed (gdbarch, 0);
10351
10352 /* Note: for displaced stepping, this includes the breakpoint, and one word
10353 of additional scratch space. This setting isn't used for anything besides
10354 displaced stepping at present. */
10355 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10356
10357 /* This should be low enough for everything. */
10358 tdep->lowest_pc = 0x20;
10359 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10360
10361 /* The default, for both APCS and AAPCS, is to return small
10362 structures in registers. */
10363 tdep->struct_return = reg_struct_return;
10364
10365 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10366 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10367
10368 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10369
10370 /* Frame handling. */
10371 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10372 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10373 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10374
10375 frame_base_set_default (gdbarch, &arm_normal_base);
10376
10377 /* Address manipulation. */
10378 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10379
10380 /* Advance PC across function entry code. */
10381 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10382
10383 /* Detect whether PC is in function epilogue. */
10384 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
10385
10386 /* Skip trampolines. */
10387 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10388
10389 /* The stack grows downward. */
10390 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10391
10392 /* Breakpoint manipulation. */
10393 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10394 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10395 arm_remote_breakpoint_from_pc);
10396
10397 /* Information about registers, etc. */
10398 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10399 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10400 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10401 set_gdbarch_register_type (gdbarch, arm_register_type);
10402 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10403
10404 /* This "info float" is FPA-specific. Use the generic version if we
10405 do not have FPA. */
10406 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10407 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10408
10409 /* Internal <-> external register number maps. */
10410 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10411 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10412
10413 set_gdbarch_register_name (gdbarch, arm_register_name);
10414
10415 /* Returning results. */
10416 set_gdbarch_return_value (gdbarch, arm_return_value);
10417
10418 /* Disassembly. */
10419 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10420
10421 /* Minsymbol frobbing. */
10422 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10423 set_gdbarch_coff_make_msymbol_special (gdbarch,
10424 arm_coff_make_msymbol_special);
10425 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10426
10427 /* Thumb-2 IT block support. */
10428 set_gdbarch_adjust_breakpoint_address (gdbarch,
10429 arm_adjust_breakpoint_address);
10430
10431 /* Virtual tables. */
10432 set_gdbarch_vbit_in_delta (gdbarch, 1);
10433
10434 /* Hook in the ABI-specific overrides, if they have been registered. */
10435 gdbarch_init_osabi (info, gdbarch);
10436
10437 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10438
10439 /* Add some default predicates. */
10440 if (is_m)
10441 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10442 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10443 dwarf2_append_unwinders (gdbarch);
10444 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10445 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10446
10447 /* Now we have tuned the configuration, set a few final things,
10448 based on what the OS ABI has told us. */
10449
10450 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10451 binaries are always marked. */
10452 if (tdep->arm_abi == ARM_ABI_AUTO)
10453 tdep->arm_abi = ARM_ABI_APCS;
10454
10455 /* Watchpoints are not steppable. */
10456 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10457
10458 /* We used to default to FPA for generic ARM, but almost nobody
10459 uses that now, and we now provide a way for the user to force
10460 the model. So default to the most useful variant. */
10461 if (tdep->fp_model == ARM_FLOAT_AUTO)
10462 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10463
10464 if (tdep->jb_pc >= 0)
10465 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10466
10467 /* Floating point sizes and format. */
10468 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10469 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10470 {
10471 set_gdbarch_double_format
10472 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10473 set_gdbarch_long_double_format
10474 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10475 }
10476 else
10477 {
10478 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10479 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10480 }
10481
10482 if (have_vfp_pseudos)
10483 {
10484 /* NOTE: These are the only pseudo registers used by
10485 the ARM target at the moment. If more are added, a
10486 little more care in numbering will be needed. */
10487
10488 int num_pseudos = 32;
10489 if (have_neon_pseudos)
10490 num_pseudos += 16;
10491 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10492 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10493 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10494 }
10495
10496 if (tdesc_data)
10497 {
10498 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10499
10500 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10501
10502 /* Override tdesc_register_type to adjust the types of VFP
10503 registers for NEON. */
10504 set_gdbarch_register_type (gdbarch, arm_register_type);
10505 }
10506
10507 /* Add standard register aliases. We add aliases even for those
10508 names which are used by the current architecture - it's simpler,
10509 and does no harm, since nothing ever lists user registers. */
10510 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10511 user_reg_add (gdbarch, arm_register_aliases[i].name,
10512 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10513
10514 return gdbarch;
10515 }
10516
10517 static void
10518 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10519 {
10520 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10521
10522 if (tdep == NULL)
10523 return;
10524
10525 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
10526 (unsigned long) tdep->lowest_pc);
10527 }
10528
10529 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10530
10531 void
10532 _initialize_arm_tdep (void)
10533 {
10534 struct ui_file *stb;
10535 long length;
10536 struct cmd_list_element *new_set, *new_show;
10537 const char *setname;
10538 const char *setdesc;
10539 const char *const *regnames;
10540 int numregs, i, j;
10541 static char *helptext;
10542 char regdesc[1024], *rdptr = regdesc;
10543 size_t rest = sizeof (regdesc);
10544
10545 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10546
10547 arm_objfile_data_key
10548 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10549
10550 /* Add ourselves to objfile event chain. */
10551 observer_attach_new_objfile (arm_exidx_new_objfile);
10552 arm_exidx_data_key
10553 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10554
10555 /* Register an ELF OS ABI sniffer for ARM binaries. */
10556 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10557 bfd_target_elf_flavour,
10558 arm_elf_osabi_sniffer);
10559
10560 /* Initialize the standard target descriptions. */
10561 initialize_tdesc_arm_with_m ();
10562 initialize_tdesc_arm_with_m_fpa_layout ();
10563 initialize_tdesc_arm_with_m_vfp_d16 ();
10564 initialize_tdesc_arm_with_iwmmxt ();
10565 initialize_tdesc_arm_with_vfpv2 ();
10566 initialize_tdesc_arm_with_vfpv3 ();
10567 initialize_tdesc_arm_with_neon ();
10568
10569 /* Get the number of possible sets of register names defined in opcodes. */
10570 num_disassembly_options = get_arm_regname_num_options ();
10571
10572 /* Add root prefix command for all "set arm"/"show arm" commands. */
10573 add_prefix_cmd ("arm", no_class, set_arm_command,
10574 _("Various ARM-specific commands."),
10575 &setarmcmdlist, "set arm ", 0, &setlist);
10576
10577 add_prefix_cmd ("arm", no_class, show_arm_command,
10578 _("Various ARM-specific commands."),
10579 &showarmcmdlist, "show arm ", 0, &showlist);
10580
10581 /* Sync the opcode insn printer with our register viewer. */
10582 parse_arm_disassembler_option ("reg-names-std");
10583
10584 /* Initialize the array that will be passed to
10585 add_setshow_enum_cmd(). */
10586 valid_disassembly_styles
10587 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10588 for (i = 0; i < num_disassembly_options; i++)
10589 {
10590 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
10591 valid_disassembly_styles[i] = setname;
10592 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10593 rdptr += length;
10594 rest -= length;
10595 /* When we find the default names, tell the disassembler to use
10596 them. */
10597 if (!strcmp (setname, "std"))
10598 {
10599 disassembly_style = setname;
10600 set_arm_regname_option (i);
10601 }
10602 }
10603 /* Mark the end of valid options. */
10604 valid_disassembly_styles[num_disassembly_options] = NULL;
10605
10606 /* Create the help text. */
10607 stb = mem_fileopen ();
10608 fprintf_unfiltered (stb, "%s%s%s",
10609 _("The valid values are:\n"),
10610 regdesc,
10611 _("The default is \"std\"."));
10612 helptext = ui_file_xstrdup (stb, NULL);
10613 ui_file_delete (stb);
10614
10615 add_setshow_enum_cmd("disassembler", no_class,
10616 valid_disassembly_styles, &disassembly_style,
10617 _("Set the disassembly style."),
10618 _("Show the disassembly style."),
10619 helptext,
10620 set_disassembly_style_sfunc,
10621 NULL, /* FIXME: i18n: The disassembly style is
10622 \"%s\". */
10623 &setarmcmdlist, &showarmcmdlist);
10624
10625 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10626 _("Set usage of ARM 32-bit mode."),
10627 _("Show usage of ARM 32-bit mode."),
10628 _("When off, a 26-bit PC will be used."),
10629 NULL,
10630 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10631 mode is %s. */
10632 &setarmcmdlist, &showarmcmdlist);
10633
10634 /* Add a command to allow the user to force the FPU model. */
10635 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10636 _("Set the floating point type."),
10637 _("Show the floating point type."),
10638 _("auto - Determine the FP typefrom the OS-ABI.\n\
10639 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10640 fpa - FPA co-processor (GCC compiled).\n\
10641 softvfp - Software FP with pure-endian doubles.\n\
10642 vfp - VFP co-processor."),
10643 set_fp_model_sfunc, show_fp_model,
10644 &setarmcmdlist, &showarmcmdlist);
10645
10646 /* Add a command to allow the user to force the ABI. */
10647 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10648 _("Set the ABI."),
10649 _("Show the ABI."),
10650 NULL, arm_set_abi, arm_show_abi,
10651 &setarmcmdlist, &showarmcmdlist);
10652
10653 /* Add two commands to allow the user to force the assumed
10654 execution mode. */
10655 add_setshow_enum_cmd ("fallback-mode", class_support,
10656 arm_mode_strings, &arm_fallback_mode_string,
10657 _("Set the mode assumed when symbols are unavailable."),
10658 _("Show the mode assumed when symbols are unavailable."),
10659 NULL, NULL, arm_show_fallback_mode,
10660 &setarmcmdlist, &showarmcmdlist);
10661 add_setshow_enum_cmd ("force-mode", class_support,
10662 arm_mode_strings, &arm_force_mode_string,
10663 _("Set the mode assumed even when symbols are available."),
10664 _("Show the mode assumed even when symbols are available."),
10665 NULL, NULL, arm_show_force_mode,
10666 &setarmcmdlist, &showarmcmdlist);
10667
10668 /* Debugging flag. */
10669 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10670 _("Set ARM debugging."),
10671 _("Show ARM debugging."),
10672 _("When on, arm-specific debugging is enabled."),
10673 NULL,
10674 NULL, /* FIXME: i18n: ARM debugging is %s. */
10675 &setdebuglist, &showdebuglist);
10676 }
10677
10678 /* ARM-reversible process record data structures. */
10679
10680 #define ARM_INSN_SIZE_BYTES 4
10681 #define THUMB_INSN_SIZE_BYTES 2
10682 #define THUMB2_INSN_SIZE_BYTES 4
10683
10684
10685 #define INSN_S_L_BIT_NUM 20
10686
10687 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10688 do \
10689 { \
10690 unsigned int reg_len = LENGTH; \
10691 if (reg_len) \
10692 { \
10693 REGS = XNEWVEC (uint32_t, reg_len); \
10694 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
10695 } \
10696 } \
10697 while (0)
10698
10699 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10700 do \
10701 { \
10702 unsigned int mem_len = LENGTH; \
10703 if (mem_len) \
10704 { \
10705 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10706 memcpy(&MEMS->len, &RECORD_BUF[0], \
10707 sizeof(struct arm_mem_r) * LENGTH); \
10708 } \
10709 } \
10710 while (0)
10711
10712 /* Checks whether the insn has already been recorded, i.e. whether its register or memory records have been filled in (boolean expression). */
10713 #define INSN_RECORDED(ARM_RECORD) \
10714 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10715
10716 /* ARM memory record structure. */
10717 struct arm_mem_r
10718 {
10719 uint32_t len; /* Record length. */
10720 uint32_t addr; /* Memory address. */
10721 };
10722
10723 /* An ARM instruction record contains the opcode and execution state
10724 of the current insn (before entry to decode_insn ()), and the lists
10725 of to-be-modified registers and memory blocks (filled in on return
10726 from decode_insn ()). */
10727
10728 typedef struct insn_decode_record_t
10729 {
10730 struct gdbarch *gdbarch;
10731 struct regcache *regcache;
10732 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10733 uint32_t arm_insn; /* Should accommodate thumb. */
10734 uint32_t cond; /* Condition code. */
10735 uint32_t opcode; /* Insn opcode. */
10736 uint32_t decode; /* Insn decode bits. */
10737 uint32_t mem_rec_count; /* No of mem records. */
10738 uint32_t reg_rec_count; /* No of reg records. */
10739 uint32_t *arm_regs; /* Registers to be saved for this record. */
10740 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10741 } insn_decode_record;
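/* Typical usage (an illustrative sketch, not lifted verbatim from any
handler below): a decode routine gathers the numbers of the registers
an insn will clobber into a local uint32_t array, sets reg_rec_count,
and transfers them with
REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
memory stores are described the same way, as (length, address) word
pairs handed to MEM_ALLOC together with mem_rec_count. */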
10742
10743
10744 /* Checks ARM SBZ and SBO mandatory fields. */
10745
10746 static int
10747 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10748 {
10749 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
10750
10751 if (!len)
10752 return 1;
10753
10754 if (!sbo)
10755 ones = ~ones;
10756
10757 while (ones)
10758 {
10759 if (!(ones & sbo))
10760 {
10761 return 0;
10762 }
10763 ones = ones >> 1;
10764 }
10765 return 1;
10766 }
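
/* Illustrative note (not part of the upstream code): BIT_NUM is
   1-based, so a call such as

     sbo_sbz (insn, 9, 12, 1)

   is intended to check that the 12-bit field at bits 8..19 of INSN
   honours its should-be-one encoding, while passing 0 as the last
   argument checks the should-be-zero case instead.  A zero LEN is
   treated as trivially satisfied.  */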
10767
10768 enum arm_record_result
10769 {
10770 ARM_RECORD_SUCCESS = 0,
10771 ARM_RECORD_FAILURE = 1
10772 };
10773
10774 typedef enum
10775 {
10776 ARM_RECORD_STRH=1,
10777 ARM_RECORD_STRD
10778 } arm_record_strx_t;
10779
10780 typedef enum
10781 {
10782 ARM_RECORD=1,
10783 THUMB_RECORD,
10784 THUMB2_RECORD
10785 } record_type_t;
10786
10787
10788 static int
10789 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10790 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10791 {
10792
10793 struct regcache *reg_cache = arm_insn_r->regcache;
10794 ULONGEST u_regval[2]= {0};
10795
10796 uint32_t reg_src1 = 0, reg_src2 = 0;
10797 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10798 uint32_t opcode1 = 0;
10799
10800 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10801 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10802 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10803
10804
10805 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10806 {
10807 /* 1) Handle misc store, immediate offset. */
10808 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10809 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10810 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10811 regcache_raw_read_unsigned (reg_cache, reg_src1,
10812 &u_regval[0]);
10813 if (ARM_PC_REGNUM == reg_src1)
10814 {
10815 /* If R15 was used as Rn, the value read is the current PC+8. */
10816 u_regval[0] = u_regval[0] + 8;
10817 }
10818 offset_8 = (immed_high << 4) | immed_low;
10819 /* Calculate target store address. */
10820 if (14 == arm_insn_r->opcode)
10821 {
10822 tgt_mem_addr = u_regval[0] + offset_8;
10823 }
10824 else
10825 {
10826 tgt_mem_addr = u_regval[0] - offset_8;
10827 }
10828 if (ARM_RECORD_STRH == str_type)
10829 {
10830 record_buf_mem[0] = 2;
10831 record_buf_mem[1] = tgt_mem_addr;
10832 arm_insn_r->mem_rec_count = 1;
10833 }
10834 else if (ARM_RECORD_STRD == str_type)
10835 {
10836 record_buf_mem[0] = 4;
10837 record_buf_mem[1] = tgt_mem_addr;
10838 record_buf_mem[2] = 4;
10839 record_buf_mem[3] = tgt_mem_addr + 4;
10840 arm_insn_r->mem_rec_count = 2;
10841 }
10842 }
10843 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10844 {
10845 /* 2) Store, register offset. */
10846 /* Get Rm. */
10847 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10848 /* Get Rn. */
10849 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10850 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10851 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10852 if (15 == reg_src2)
10853 {
10854 /* If R15 was used as Rn, the value read is the current PC+8. */
10855 u_regval[0] = u_regval[0] + 8;
10856 }
10857 /* Calculate target store address, Rn +/- Rm, register offset. */
10858 if (12 == arm_insn_r->opcode)
10859 {
10860 tgt_mem_addr = u_regval[0] + u_regval[1];
10861 }
10862 else
10863 {
10864 tgt_mem_addr = u_regval[1] - u_regval[0];
10865 }
10866 if (ARM_RECORD_STRH == str_type)
10867 {
10868 record_buf_mem[0] = 2;
10869 record_buf_mem[1] = tgt_mem_addr;
10870 arm_insn_r->mem_rec_count = 1;
10871 }
10872 else if (ARM_RECORD_STRD == str_type)
10873 {
10874 record_buf_mem[0] = 4;
10875 record_buf_mem[1] = tgt_mem_addr;
10876 record_buf_mem[2] = 4;
10877 record_buf_mem[3] = tgt_mem_addr + 4;
10878 arm_insn_r->mem_rec_count = 2;
10879 }
10880 }
10881 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10882 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10883 {
10884 /* 3) Store, immediate pre-indexed. */
10885 /* 5) Store, immediate post-indexed. */
10886 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10887 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10888 offset_8 = (immed_high << 4) | immed_low;
10889 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10890 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10891 /* Calculate target store address, Rn +/- Rm, register offset. */
10892 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10893 {
10894 tgt_mem_addr = u_regval[0] + offset_8;
10895 }
10896 else
10897 {
10898 tgt_mem_addr = u_regval[0] - offset_8;
10899 }
10900 if (ARM_RECORD_STRH == str_type)
10901 {
10902 record_buf_mem[0] = 2;
10903 record_buf_mem[1] = tgt_mem_addr;
10904 arm_insn_r->mem_rec_count = 1;
10905 }
10906 else if (ARM_RECORD_STRD == str_type)
10907 {
10908 record_buf_mem[0] = 4;
10909 record_buf_mem[1] = tgt_mem_addr;
10910 record_buf_mem[2] = 4;
10911 record_buf_mem[3] = tgt_mem_addr + 4;
10912 arm_insn_r->mem_rec_count = 2;
10913 }
10914 /* Record Rn also as it changes. */
10915 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10916 arm_insn_r->reg_rec_count = 1;
10917 }
10918 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10919 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10920 {
10921 /* 4) Store, register pre-indexed. */
10922 /* 6) Store, register post-indexed. */
10923 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10924 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10925 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10926 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10927 /* Calculate target store address, Rn +/- Rm, register offset. */
10928 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10929 {
10930 tgt_mem_addr = u_regval[0] + u_regval[1];
10931 }
10932 else
10933 {
10934 tgt_mem_addr = u_regval[1] - u_regval[0];
10935 }
10936 if (ARM_RECORD_STRH == str_type)
10937 {
10938 record_buf_mem[0] = 2;
10939 record_buf_mem[1] = tgt_mem_addr;
10940 arm_insn_r->mem_rec_count = 1;
10941 }
10942 else if (ARM_RECORD_STRD == str_type)
10943 {
10944 record_buf_mem[0] = 4;
10945 record_buf_mem[1] = tgt_mem_addr;
10946 record_buf_mem[2] = 4;
10947 record_buf_mem[3] = tgt_mem_addr + 4;
10948 arm_insn_r->mem_rec_count = 2;
10949 }
10950 /* Record Rn also as it changes. */
10951 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10952 arm_insn_r->reg_rec_count = 1;
10953 }
10954 return 0;
10955 }
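
/* Worked example (illustrative only, register values assumed): for the
   misc store "strh r1, [r2, #6]" with r2 containing 0x1000, the
   immediate-offset path above yields

     record_buf_mem[0] = 2;         -- STRH stores a halfword
     record_buf_mem[1] = 0x1006;    -- r2 + offset
     mem_rec_count     = 1;

   while "strd r0, r1, [r2, #8]" records two {4, address} pairs, at
   0x1008 and 0x100c.  */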
10956
10957 /* Handling ARM extension space insns. */
10958
10959 static int
10960 arm_record_extension_space (insn_decode_record *arm_insn_r)
10961 {
10962 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10963 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10964 uint32_t record_buf[8], record_buf_mem[8];
10965 uint32_t reg_src1 = 0;
10966 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10967 struct regcache *reg_cache = arm_insn_r->regcache;
10968 ULONGEST u_regval = 0;
10969
10970 gdb_assert (!INSN_RECORDED(arm_insn_r));
10971 /* Handle unconditional insn extension space. */
10972
10973 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10974 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10975 if (arm_insn_r->cond)
10976 {
10977 /* PLD has no effect on architectural state; it just affects
10978 the caches. */
10979 if (5 == ((opcode1 & 0xE0) >> 5))
10980 {
10981 /* BLX(1) */
10982 record_buf[0] = ARM_PS_REGNUM;
10983 record_buf[1] = ARM_LR_REGNUM;
10984 arm_insn_r->reg_rec_count = 2;
10985 }
10986 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10987 }
10988
10989
10990 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10991 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10992 {
10993 ret = -1;
10994 /* Undefined instruction on ARM V5; need to handle if later
10995 versions define it. */
10996 }
10997
10998 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10999 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11000 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
11001
11002 /* Handle arithmetic insn extension space. */
11003 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
11004 && !INSN_RECORDED(arm_insn_r))
11005 {
11006 /* Handle MLA(S) and MUL(S). */
11007 if (0 <= insn_op1 && 3 >= insn_op1)
11008 {
11009 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11010 record_buf[1] = ARM_PS_REGNUM;
11011 arm_insn_r->reg_rec_count = 2;
11012 }
11013 else if (4 <= insn_op1 && 15 >= insn_op1)
11014 {
11015 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11016 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11017 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11018 record_buf[2] = ARM_PS_REGNUM;
11019 arm_insn_r->reg_rec_count = 3;
11020 }
11021 }
11022
11023 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11024 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11025 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11026
11027 /* Handle control insn extension space. */
11028
11029 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11030 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11031 {
11032 if (!bit (arm_insn_r->arm_insn,25))
11033 {
11034 if (!bits (arm_insn_r->arm_insn, 4, 7))
11035 {
11036 if ((0 == insn_op1) || (2 == insn_op1))
11037 {
11038 /* MRS. */
11039 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11040 arm_insn_r->reg_rec_count = 1;
11041 }
11042 else if (1 == insn_op1)
11043 {
11044 /* CPSR is going to be changed. */
11045 record_buf[0] = ARM_PS_REGNUM;
11046 arm_insn_r->reg_rec_count = 1;
11047 }
11048 else if (3 == insn_op1)
11049 {
11050 /* SPSR is going to be changed. */
11051 /* We need to get SPSR value, which is yet to be done. */
11052 printf_unfiltered (_("Process record does not support "
11053 "instruction 0x%0x at address %s.\n"),
11054 arm_insn_r->arm_insn,
11055 paddress (arm_insn_r->gdbarch,
11056 arm_insn_r->this_addr));
11057 return -1;
11058 }
11059 }
11060 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11061 {
11062 if (1 == insn_op1)
11063 {
11064 /* BX. */
11065 record_buf[0] = ARM_PS_REGNUM;
11066 arm_insn_r->reg_rec_count = 1;
11067 }
11068 else if (3 == insn_op1)
11069 {
11070 /* CLZ. */
11071 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11072 arm_insn_r->reg_rec_count = 1;
11073 }
11074 }
11075 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11076 {
11077 /* BLX. */
11078 record_buf[0] = ARM_PS_REGNUM;
11079 record_buf[1] = ARM_LR_REGNUM;
11080 arm_insn_r->reg_rec_count = 2;
11081 }
11082 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11083 {
11084 /* QADD, QSUB, QDADD, QDSUB */
11085 record_buf[0] = ARM_PS_REGNUM;
11086 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11087 arm_insn_r->reg_rec_count = 2;
11088 }
11089 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11090 {
11091 /* BKPT. */
11092 record_buf[0] = ARM_PS_REGNUM;
11093 record_buf[1] = ARM_LR_REGNUM;
11094 arm_insn_r->reg_rec_count = 2;
11095
11096 /* Save SPSR also; how? */
11097 printf_unfiltered (_("Process record does not support "
11098 "instruction 0x%0x at address %s.\n"),
11099 arm_insn_r->arm_insn,
11100 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11101 return -1;
11102 }
11103 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11104 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11105 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11106 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11107 )
11108 {
11109 if (0 == insn_op1 || 1 == insn_op1)
11110 {
11111 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11112 /* We don't do optimization for SMULW<y> where we
11113 need only Rd. */
11114 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11115 record_buf[1] = ARM_PS_REGNUM;
11116 arm_insn_r->reg_rec_count = 2;
11117 }
11118 else if (2 == insn_op1)
11119 {
11120 /* SMLAL<x><y>. */
11121 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11122 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11123 arm_insn_r->reg_rec_count = 2;
11124 }
11125 else if (3 == insn_op1)
11126 {
11127 /* SMUL<x><y>. */
11128 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11129 arm_insn_r->reg_rec_count = 1;
11130 }
11131 }
11132 }
11133 else
11134 {
11135 /* MSR : immediate form. */
11136 if (1 == insn_op1)
11137 {
11138 /* CPSR is going to be changed. */
11139 record_buf[0] = ARM_PS_REGNUM;
11140 arm_insn_r->reg_rec_count = 1;
11141 }
11142 else if (3 == insn_op1)
11143 {
11144 /* SPSR is going to be changed. */
11145 /* We need to get the SPSR value, which is yet to be done. */
11146 printf_unfiltered (_("Process record does not support "
11147 "instruction 0x%0x at address %s.\n"),
11148 arm_insn_r->arm_insn,
11149 paddress (arm_insn_r->gdbarch,
11150 arm_insn_r->this_addr));
11151 return -1;
11152 }
11153 }
11154 }
11155
11156 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11157 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11158 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11159
11160 /* Handle load/store insn extension space. */
11161
11162 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11163 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11164 && !INSN_RECORDED(arm_insn_r))
11165 {
11166 /* SWP/SWPB. */
11167 if (0 == insn_op1)
11168 {
11169 /* This insn changes a register and memory as well. */
11170 /* SWP or SWPB insn. */
11171 /* Get memory address given by Rn. */
11172 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11173 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11174 /* SWP insn? It swaps a word. */
11175 if (8 == arm_insn_r->opcode)
11176 {
11177 record_buf_mem[0] = 4;
11178 }
11179 else
11180 {
11181 /* SWPB insn, swaps only a byte. */
11182 record_buf_mem[0] = 1;
11183 }
11184 record_buf_mem[1] = u_regval;
11185 arm_insn_r->mem_rec_count = 1;
11186 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11187 arm_insn_r->reg_rec_count = 1;
11188 }
11189 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11190 {
11191 /* STRH. */
11192 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11193 ARM_RECORD_STRH);
11194 }
11195 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11196 {
11197 /* LDRD. */
11198 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11199 record_buf[1] = record_buf[0] + 1;
11200 arm_insn_r->reg_rec_count = 2;
11201 }
11202 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11203 {
11204 /* STRD. */
11205 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11206 ARM_RECORD_STRD);
11207 }
11208 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11209 {
11210 /* LDRH, LDRSB, LDRSH. */
11211 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11212 arm_insn_r->reg_rec_count = 1;
11213 }
11214
11215 }
11216
11217 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11218 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11219 && !INSN_RECORDED(arm_insn_r))
11220 {
11221 ret = -1;
11222 /* Handle coprocessor insn extension space. */
11223 }
11224
11225 /* To be done for ARMv5 and later; as of now we return -1. */
11226 if (-1 == ret)
11227 printf_unfiltered (_("Process record does not support instruction 0x%0x "
11228 "at address %s.\n"),arm_insn_r->arm_insn,
11229 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11230
11231
11232 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11233 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11234
11235 return ret;
11236 }
11237
11238 /* Handling opcode 000 insns. */
11239
11240 static int
11241 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11242 {
11243 struct regcache *reg_cache = arm_insn_r->regcache;
11244 uint32_t record_buf[8], record_buf_mem[8];
11245 ULONGEST u_regval[2] = {0};
11246
11247 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11248 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11249 uint32_t opcode1 = 0;
11250
11251 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11252 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11253 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11254
11255 /* Data processing insn /multiply insn. */
11256 if (9 == arm_insn_r->decode
11257 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11258 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11259 {
11260 /* Handle multiply instructions. */
11261 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11262 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11263 {
11264 /* Handle MLA and MUL. */
11265 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11266 record_buf[1] = ARM_PS_REGNUM;
11267 arm_insn_r->reg_rec_count = 2;
11268 }
11269 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11270 {
11271 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11272 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11273 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11274 record_buf[2] = ARM_PS_REGNUM;
11275 arm_insn_r->reg_rec_count = 3;
11276 }
11277 }
11278 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11279 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11280 {
11281 /* Handle misc load insns, as 20th bit (L = 1). */
11282 /* LDR insn has a capability to do branching, if
11283 MOV LR, PC is preceded by an LDR insn having R15 as its
11284 destination; in that case, it emulates a branch and link insn,
11285 and hence we need to save CPSR and PC as well. I am not sure this
11286 is the right place; an opcode = 010 LDR insn makes this happen, if
11287 R15 was used. */
11288 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11289 if (15 != reg_dest)
11290 {
11291 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11292 arm_insn_r->reg_rec_count = 1;
11293 }
11294 else
11295 {
11296 record_buf[0] = reg_dest;
11297 record_buf[1] = ARM_PS_REGNUM;
11298 arm_insn_r->reg_rec_count = 2;
11299 }
11300 }
11301 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11302 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11303 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11304 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11305 {
11306 /* Handle MSR insn. */
11307 if (9 == arm_insn_r->opcode)
11308 {
11309 /* CPSR is going to be changed. */
11310 record_buf[0] = ARM_PS_REGNUM;
11311 arm_insn_r->reg_rec_count = 1;
11312 }
11313 else
11314 {
11315 /* SPSR is going to be changed. */
11316 /* How to read SPSR value? */
11317 printf_unfiltered (_("Process record does not support instruction "
11318 "0x%0x at address %s.\n"),
11319 arm_insn_r->arm_insn,
11320 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11321 return -1;
11322 }
11323 }
11324 else if (9 == arm_insn_r->decode
11325 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11326 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11327 {
11328 /* Handling SWP, SWPB. */
11329 /* This insn changes a register and memory as well. */
11330 /* SWP or SWPB insn. */
11331
11332 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11333 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11334 /* SWP insn? It swaps a word. */
11335 if (8 == arm_insn_r->opcode)
11336 {
11337 record_buf_mem[0] = 4;
11338 }
11339 else
11340 {
11341 /* SWPB insn, swaps only a byte. */
11342 record_buf_mem[0] = 1;
11343 }
11344 record_buf_mem[1] = u_regval[0];
11345 arm_insn_r->mem_rec_count = 1;
11346 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11347 arm_insn_r->reg_rec_count = 1;
11348 }
11349 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11350 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11351 {
11352 /* Handle BLX, branch and link/exchange. */
11353 if (9 == arm_insn_r->opcode)
11354 {
11355 /* The branch state is chosen by bit[0] of Rm, which sets the T bit
11356 of CPSR, and R14 stores the return address. */
11357 record_buf[0] = ARM_PS_REGNUM;
11358 record_buf[1] = ARM_LR_REGNUM;
11359 arm_insn_r->reg_rec_count = 2;
11360 }
11361 }
11362 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11363 {
11364 /* Handle enhanced software breakpoint insn, BKPT. */
11365 /* CPSR is changed so execution continues in ARM state, disabling
11366 normal interrupts and entering abort mode. */
11367 /* PC is set according to the high vector configuration. */
11368 /* If the user hits the breakpoint and types reverse, we need
11369 to go back with the previous CPSR and
11370 Program Counter. */
11371 record_buf[0] = ARM_PS_REGNUM;
11372 record_buf[1] = ARM_LR_REGNUM;
11373 arm_insn_r->reg_rec_count = 2;
11374
11375 /* Save SPSR also; how? */
11376 printf_unfiltered (_("Process record does not support instruction "
11377 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11378 paddress (arm_insn_r->gdbarch,
11379 arm_insn_r->this_addr));
11380 return -1;
11381 }
11382 else if (11 == arm_insn_r->decode
11383 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11384 {
11385 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11386
11387 /* Handle str(x) insn */
11388 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11389 ARM_RECORD_STRH);
11390 }
11391 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11392 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11393 {
11394 /* Handle BX, branch and link/exchange. */
11395 /* The branch state is chosen by bit[0] of Rm, which sets the T bit of CPSR. */
11396 record_buf[0] = ARM_PS_REGNUM;
11397 arm_insn_r->reg_rec_count = 1;
11398 }
11399 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11400 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11401 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11402 {
11403 /* Count leading zeros: CLZ. */
11404 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11405 arm_insn_r->reg_rec_count = 1;
11406 }
11407 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11408 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11409 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11410 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11411 )
11412 {
11413 /* Handle MRS insn. */
11414 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11415 arm_insn_r->reg_rec_count = 1;
11416 }
11417 else if (arm_insn_r->opcode <= 15)
11418 {
11419 /* Normal data processing insns. */
11420 /* In all of the 11 shifter-operand modes, the insn modifies the
11421 destination register, which is specified by bits 12-15. */
11422 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11423 record_buf[1] = ARM_PS_REGNUM;
11424 arm_insn_r->reg_rec_count = 2;
11425 }
11426 else
11427 {
11428 return -1;
11429 }
11430
11431 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11432 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11433 return 0;
11434 }
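
/* Illustrative example (not part of the upstream code): an ordinary
   data-processing insn such as "adds r0, r1, r2" falls through to the
   "opcode <= 15" arm above, so the record contains the destination
   register and the status register:

     record_buf[0] = 0;             -- Rd, bits 12-15
     record_buf[1] = ARM_PS_REGNUM;
     reg_rec_count = 2;  */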
11435
11436 /* Handling opcode 001 insns. */
11437
11438 static int
11439 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11440 {
11441 uint32_t record_buf[8], record_buf_mem[8];
11442
11443 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11444 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11445
11446 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11447 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11448 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11449 )
11450 {
11451 /* Handle MSR insn. */
11452 if (9 == arm_insn_r->opcode)
11453 {
11454 /* CPSR is going to be changed. */
11455 record_buf[0] = ARM_PS_REGNUM;
11456 arm_insn_r->reg_rec_count = 1;
11457 }
11458 else
11459 {
11460 /* SPSR is going to be changed. */
11461 }
11462 }
11463 else if (arm_insn_r->opcode <= 15)
11464 {
11465 /* Normal data processing insns. */
11466 /* In all of the 11 shifter-operand modes, the insn modifies the
11467 destination register, which is specified by bits 12-15. */
11468 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11469 record_buf[1] = ARM_PS_REGNUM;
11470 arm_insn_r->reg_rec_count = 2;
11471 }
11472 else
11473 {
11474 return -1;
11475 }
11476
11477 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11478 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11479 return 0;
11480 }
11481
11482 /* Handling opcode 010 insns. */
11483
11484 static int
11485 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11486 {
11487 struct regcache *reg_cache = arm_insn_r->regcache;
11488
11489 uint32_t reg_src1 = 0 , reg_dest = 0;
11490 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11491 uint32_t record_buf[8], record_buf_mem[8];
11492
11493 ULONGEST u_regval = 0;
11494
11495 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11496 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11497
11498 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11499 {
11500 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11501 /* LDR insn has a capability to do branching, if
11502 MOV LR, PC is preceded by an LDR insn having R15 as its
11503 destination; in that case, it emulates a branch and link insn,
11504 and hence we need to save CPSR and PC as well. */
11505 if (ARM_PC_REGNUM != reg_dest)
11506 {
11507 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11508 arm_insn_r->reg_rec_count = 1;
11509 }
11510 else
11511 {
11512 record_buf[0] = reg_dest;
11513 record_buf[1] = ARM_PS_REGNUM;
11514 arm_insn_r->reg_rec_count = 2;
11515 }
11516 }
11517 else
11518 {
11519 /* Store, immediate offset, immediate pre-indexed,
11520 immediate post-indexed. */
11521 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11522 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11523 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11524 /* U == 1 */
11525 if (bit (arm_insn_r->arm_insn, 23))
11526 {
11527 tgt_mem_addr = u_regval + offset_12;
11528 }
11529 else
11530 {
11531 tgt_mem_addr = u_regval - offset_12;
11532 }
11533
11534 switch (arm_insn_r->opcode)
11535 {
11536 /* STR. */
11537 case 8:
11538 case 12:
11539 /* STR. */
11540 case 9:
11541 case 13:
11542 /* STRT. */
11543 case 1:
11544 case 5:
11545 /* STR. */
11546 case 4:
11547 case 0:
11548 record_buf_mem[0] = 4;
11549 break;
11550
11551 /* STRB. */
11552 case 10:
11553 case 14:
11554 /* STRB. */
11555 case 11:
11556 case 15:
11557 /* STRBT. */
11558 case 3:
11559 case 7:
11560 /* STRB. */
11561 case 2:
11562 case 6:
11563 record_buf_mem[0] = 1;
11564 break;
11565
11566 default:
11567 gdb_assert_not_reached ("no decoding pattern found");
11568 break;
11569 }
11570 record_buf_mem[1] = tgt_mem_addr;
11571 arm_insn_r->mem_rec_count = 1;
11572
11573 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11574 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11575 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11576 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11577 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11578 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11579 )
11580 {
11581 /* We are handling pre-indexed and post-indexed modes,
11582 where Rn is going to be changed. */
11583 record_buf[0] = reg_src1;
11584 arm_insn_r->reg_rec_count = 1;
11585 }
11586 }
11587
11588 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11589 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11590 return 0;
11591 }
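
/* Worked example (illustrative only, register values assumed): for
   "str r1, [r2, #8]" with r2 = 0x1000 the function above records one
   4-byte block at 0x1008; for the writeback form "str r1, [r2, #8]!"
   it additionally records r2, since pre-indexing updates the base
   register.  */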
11592
11593 /* Handling opcode 011 insns. */
11594
11595 static int
11596 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11597 {
11598 struct regcache *reg_cache = arm_insn_r->regcache;
11599
11600 uint32_t shift_imm = 0;
11601 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11602 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11603 uint32_t record_buf[8], record_buf_mem[8];
11604
11605 LONGEST s_word;
11606 ULONGEST u_regval[2];
11607
11608 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11609 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11610
11611 /* Handle enhanced store insns and the LDRD DSP insn;
11612 the ordering below follows the addressing modes of the
11613 store insns (STRH insn). */
11614
11615 /* LDR or STR? */
11616 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11617 {
11618 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11619 /* LDR insn has a capability to do branching, if
11620 MOV LR, PC is preceded by an LDR insn having R15 as its
11621 destination; in that case, it emulates a branch and link insn,
11622 and hence we need to save CPSR and PC as well. */
11623 if (15 != reg_dest)
11624 {
11625 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11626 arm_insn_r->reg_rec_count = 1;
11627 }
11628 else
11629 {
11630 record_buf[0] = reg_dest;
11631 record_buf[1] = ARM_PS_REGNUM;
11632 arm_insn_r->reg_rec_count = 2;
11633 }
11634 }
11635 else
11636 {
11637 if (! bits (arm_insn_r->arm_insn, 4, 11))
11638 {
11639 /* Store insn, register offset and register pre-indexed,
11640 register post-indexed. */
11641 /* Get Rm. */
11642 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11643 /* Get Rn. */
11644 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11645 regcache_raw_read_unsigned (reg_cache, reg_src1
11646 , &u_regval[0]);
11647 regcache_raw_read_unsigned (reg_cache, reg_src2
11648 , &u_regval[1]);
11649 if (15 == reg_src2)
11650 {
11651 /* If R15 was used as Rn, the value read is the current PC+8. */
11652 /* Pre-indexed mode doesn't reach here; illegal insn. */
11653 u_regval[0] = u_regval[0] + 8;
11654 }
11655 /* Calculate target store address, Rn +/- Rm, register offset. */
11656 /* U == 1. */
11657 if (bit (arm_insn_r->arm_insn, 23))
11658 {
11659 tgt_mem_addr = u_regval[0] + u_regval[1];
11660 }
11661 else
11662 {
11663 tgt_mem_addr = u_regval[1] - u_regval[0];
11664 }
11665
11666 switch (arm_insn_r->opcode)
11667 {
11668 /* STR. */
11669 case 8:
11670 case 12:
11671 /* STR. */
11672 case 9:
11673 case 13:
11674 /* STRT. */
11675 case 1:
11676 case 5:
11677 /* STR. */
11678 case 0:
11679 case 4:
11680 record_buf_mem[0] = 4;
11681 break;
11682
11683 /* STRB. */
11684 case 10:
11685 case 14:
11686 /* STRB. */
11687 case 11:
11688 case 15:
11689 /* STRBT. */
11690 case 3:
11691 case 7:
11692 /* STRB. */
11693 case 2:
11694 case 6:
11695 record_buf_mem[0] = 1;
11696 break;
11697
11698 default:
11699 gdb_assert_not_reached ("no decoding pattern found");
11700 break;
11701 }
11702 record_buf_mem[1] = tgt_mem_addr;
11703 arm_insn_r->mem_rec_count = 1;
11704
11705 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11706 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11707 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11708 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11709 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11710 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11711 )
11712 {
11713 /* Rn is going to be changed in pre-indexed mode and
11714 post-indexed mode as well. */
11715 record_buf[0] = reg_src2;
11716 arm_insn_r->reg_rec_count = 1;
11717 }
11718 }
11719 else
11720 {
11721 /* Store insn, scaled register offset; scaled pre-indexed. */
11722 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11723 /* Get Rm. */
11724 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11725 /* Get Rn. */
11726 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11727 /* Get shift_imm. */
11728 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11729 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11730 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11731 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11732 /* Here offset_12 holds the shift type (bits 5-6). */
11733 switch (offset_12)
11734 {
11735 case 0:
11736 /* LSL: offset_12 now becomes the scaled index. */
11737 offset_12 = u_regval[0] << shift_imm;
11738 break;
11739
11740 case 1:
11741 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
11742 break;
11743
11744 case 2:
11745 if (!shift_imm)
11746 {
11747 if (bit (u_regval[0], 31))
11748 {
11749 offset_12 = 0xFFFFFFFF;
11750 }
11751 else
11752 {
11753 offset_12 = 0;
11754 }
11755 }
11756 else
11757 {
11758 /* This is an arithmetic shift (ASR). */
11759 offset_12 = s_word >> shift_imm;
11760 }
11761 break;
11762
11763 case 3:
11764 if (!shift_imm)
11765 {
11766 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11767 &u_regval[1]);
11768 /* Get C flag value and shift it by 31. */
11769 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11770 | (u_regval[0]) >> 1);
11771 }
11772 else
11773 {
11774 offset_12 = (u_regval[0] >> shift_imm) \
11775 | (u_regval[0] <<
11776 (sizeof (uint32_t) * 8 - shift_imm));
11777 }
11778 break;
11779
11780 default:
11781 gdb_assert_not_reached ("no decoding pattern found");
11782 break;
11783 }
11784
11785 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11786 /* bit U set. */
11787 if (bit (arm_insn_r->arm_insn, 23))
11788 {
11789 tgt_mem_addr = u_regval[1] + offset_12;
11790 }
11791 else
11792 {
11793 tgt_mem_addr = u_regval[1] - offset_12;
11794 }
11795
11796 switch (arm_insn_r->opcode)
11797 {
11798 /* STR. */
11799 case 8:
11800 case 12:
11801 /* STR. */
11802 case 9:
11803 case 13:
11804 /* STRT. */
11805 case 1:
11806 case 5:
11807 /* STR. */
11808 case 0:
11809 case 4:
11810 record_buf_mem[0] = 4;
11811 break;
11812
11813 /* STRB. */
11814 case 10:
11815 case 14:
11816 /* STRB. */
11817 case 11:
11818 case 15:
11819 /* STRBT. */
11820 case 3:
11821 case 7:
11822 /* STRB. */
11823 case 2:
11824 case 6:
11825 record_buf_mem[0] = 1;
11826 break;
11827
11828 default:
11829 gdb_assert_not_reached ("no decoding pattern found");
11830 break;
11831 }
11832 record_buf_mem[1] = tgt_mem_addr;
11833 arm_insn_r->mem_rec_count = 1;
11834
11835 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11836 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11837 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11838 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11839 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11840 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11841 )
11842 {
11843 /* Rn is going to be changed in register scaled pre-indexed
11844 mode, and scaled post-indexed mode. */
11845 record_buf[0] = reg_src2;
11846 arm_insn_r->reg_rec_count = 1;
11847 }
11848 }
11849 }
11850
11851 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11852 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11853 return 0;
11854 }
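
/* Worked example (illustrative only, register values assumed): for the
   scaled-register store "str r0, [r1, r2, lsl #2]" with r1 = 0x2000
   and r2 = 3, the shift type (bits 5-6) is LSL, the index becomes
   3 << 2 = 12, and a single {4, 0x200c} memory block is recorded; no
   register is recorded because the base register is not written
   back.  */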
11855
11856 /* Handling opcode 100 insns. */
11857
11858 static int
11859 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11860 {
11861 struct regcache *reg_cache = arm_insn_r->regcache;
11862
11863 uint32_t register_list[16] = {0}, register_count = 0, register_bits = 0;
11864 uint32_t reg_src1 = 0, addr_mode = 0, no_of_regs = 0;
11865 uint32_t start_address = 0, index = 0;
11866 uint32_t record_buf[24], record_buf_mem[48];
11867
11868 ULONGEST u_regval[2] = {0};
11869
11870 /* This mode is exclusively for load and store multiple. */
11871 /* Handle increment after/before and decrement after/before modes;
11872 Rn is changing depending on the W bit, but as of now we store
11873 Rn too, without optimization. */
11874
11875 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11876 {
11877 /* LDM (1,2,3) where LDM (3) changes CPSR too. */
11878
11879 if (bit (arm_insn_r->arm_insn, 20) && !bit (arm_insn_r->arm_insn, 22))
11880 {
11881 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11882 no_of_regs = 15;
11883 }
11884 else
11885 {
11886 register_bits = bits (arm_insn_r->arm_insn, 0, 14);
11887 no_of_regs = 14;
11888 }
11889 /* Get Rn. */
11890 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11891 while (register_bits)
11892 {
11893 if (register_bits & 0x00000001)
11894 record_buf[index++] = register_count;
11895 register_bits = register_bits >> 1;
11896 register_count++;
11897 }
11898
11899 /* Extra space for Base Register and CPSR; without optimization. */
11900 record_buf[index++] = reg_src1;
11901 record_buf[index++] = ARM_PS_REGNUM;
11902 arm_insn_r->reg_rec_count = index;
11903 }
11904 else
11905 {
11906 /* It handles both STM(1) and STM(2). */
11907 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11908
11909 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11910 /* Get Rn. */
11911 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11912 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11913 while (register_bits)
11914 {
11915 if (register_bits & 0x00000001)
11916 register_count++;
11917 register_bits = register_bits >> 1;
11918 }
11919
11920 switch (addr_mode)
11921 {
11922 /* Decrement after. */
11923 case 0:
11924 start_address = (u_regval[0]) - (register_count * 4) + 4;
11925 arm_insn_r->mem_rec_count = register_count;
11926 while (register_count)
11927 {
11928 record_buf_mem[(register_count * 2) - 1] = start_address;
11929 record_buf_mem[(register_count * 2) - 2] = 4;
11930 start_address = start_address + 4;
11931 register_count--;
11932 }
11933 break;
11934
11935 /* Increment after. */
11936 case 1:
11937 start_address = u_regval[0];
11938 arm_insn_r->mem_rec_count = register_count;
11939 while (register_count)
11940 {
11941 record_buf_mem[(register_count * 2) - 1] = start_address;
11942 record_buf_mem[(register_count * 2) - 2] = 4;
11943 start_address = start_address + 4;
11944 register_count--;
11945 }
11946 break;
11947
11948 /* Decrement before. */
11949 case 2:
11950
11951 start_address = (u_regval[0]) - (register_count * 4);
11952 arm_insn_r->mem_rec_count = register_count;
11953 while (register_count)
11954 {
11955 record_buf_mem[(register_count * 2) - 1] = start_address;
11956 record_buf_mem[(register_count * 2) - 2] = 4;
11957 start_address = start_address + 4;
11958 register_count--;
11959 }
11960 break;
11961
11962 /* Increment before. */
11963 case 3:
11964 start_address = u_regval[0] + 4;
11965 arm_insn_r->mem_rec_count = register_count;
11966 while (register_count)
11967 {
11968 record_buf_mem[(register_count * 2) - 1] = start_address;
11969 record_buf_mem[(register_count * 2) - 2] = 4;
11970 start_address = start_address + 4;
11971 register_count--;
11972 }
11973 break;
11974
11975 default:
11976 gdb_assert_not_reached ("no decoding pattern found");
11977 break;
11978 }
11979
11980 /* Base register also changes; based on condition and W bit. */
11981 /* We save it anyway without optimization. */
11982 record_buf[0] = reg_src1;
11983 arm_insn_r->reg_rec_count = 1;
11984 }
11985
11986 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11987 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11988 return 0;
11989 }
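
/* Worked example (illustrative only, register values assumed): the
   store-multiple "stmdb r0!, {r1, r2, r3}" with r0 = 0x3000 takes the
   decrement-before case above, so start_address = 0x3000 - 12 and the
   record gets three {4, address} pairs at 0x2ff4, 0x2ff8 and 0x2ffc,
   plus the base register r0 itself.  */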
11990
11991 /* Handling opcode 101 insns. */
11992
11993 static int
11994 arm_record_b_bl (insn_decode_record *arm_insn_r)
11995 {
11996 uint32_t record_buf[8];
11997
11998 /* Handle B, BL, BLX(1) insns. */
11999 /* B simply branches so we do nothing here. */
12000 /* Note: BLX(1) doesn't fall here; instead it falls into
12001 the extension space. */
12002 if (bit (arm_insn_r->arm_insn, 24))
12003 {
12004 record_buf[0] = ARM_LR_REGNUM;
12005 arm_insn_r->reg_rec_count = 1;
12006 }
12007
12008 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12009
12010 return 0;
12011 }
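
/* Illustrative note (not part of the upstream code): for "bl label"
   bit 24 is set, so ARM_LR_REGNUM is recorded above; a plain "b label"
   records nothing extra here, because the PC itself is saved by the
   top-level process_record path for every instruction.  */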
12012
12013 /* Handling opcode 110 insns. */
12014
12015 static int
12016 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
12017 {
12018 printf_unfiltered (_("Process record does not support instruction "
12019 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
12020 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
12021
12022 return -1;
12023 }
12024
12025 /* Handling opcode 111 insns. */
12026
12027 static int
12028 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
12029 {
12030 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
12031 struct regcache *reg_cache = arm_insn_r->regcache;
12032 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12033 ULONGEST u_regval = 0;
12034
12035 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12036
12037 /* Handle arm SWI/SVC system call instructions. */
12038 if (15 == arm_insn_r->opcode)
12039 {
12040 if (tdep->arm_syscall_record != NULL)
12041 {
12042 ULONGEST svc_operand, svc_number;
12043
12044 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12045
12046 if (svc_operand) /* OABI. */
12047 svc_number = svc_operand - 0x900000;
12048 else /* EABI. */
12049 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12050
12051 ret = tdep->arm_syscall_record (reg_cache, svc_number);
12052 }
12053 else
12054 {
12055 printf_unfiltered (_("no syscall record support\n"));
12056 ret = -1;
12057 }
12058 }
12059 else
12060 {
12061 arm_record_unsupported_insn (arm_insn_r);
12062 ret = -1;
12063 }
12064
12065 return ret;
12066 }
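
/* Illustrative example (not part of the upstream code): for the OABI
   form "swi 0x900004" the immediate is non-zero, so the syscall number
   is 0x900004 - 0x900000 = 4; for the EABI form "svc 0" the immediate
   is zero and the syscall number is read from r7 instead.  Either way
   the actual side effects are delegated to the OS-ABI specific
   tdep->arm_syscall_record hook.  */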
12067
12068 /* Handling opcode 000 insns. */
12069
12070 static int
12071 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12072 {
12073 uint32_t record_buf[8];
12074 uint32_t reg_src1 = 0;
12075
12076 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12077
12078 record_buf[0] = ARM_PS_REGNUM;
12079 record_buf[1] = reg_src1;
12080 thumb_insn_r->reg_rec_count = 2;
12081
12082 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12083
12084 return 0;
12085 }
12086
12087
12088 /* Handling opcode 001 insns. */
12089
12090 static int
12091 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12092 {
12093 uint32_t record_buf[8];
12094 uint32_t reg_src1 = 0;
12095
12096 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12097
12098 record_buf[0] = ARM_PS_REGNUM;
12099 record_buf[1] = reg_src1;
12100 thumb_insn_r->reg_rec_count = 2;
12101
12102 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12103
12104 return 0;
12105 }
12106
12107 /* Handling opcode 010 insns. */
12108
12109 static int
12110 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12111 {
12112 struct regcache *reg_cache = thumb_insn_r->regcache;
12113 uint32_t record_buf[8], record_buf_mem[8];
12114
12115 uint32_t reg_src1 = 0, reg_src2 = 0;
12116 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12117
12118 ULONGEST u_regval[2] = {0};
12119
12120 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12121
12122 if (bit (thumb_insn_r->arm_insn, 12))
12123 {
12124 /* Handle load/store register offset. */
12125 opcode2 = bits (thumb_insn_r->arm_insn, 9, 11);
12126 if (opcode2 >= 3 && opcode2 <= 7)
12127 {
12128 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
12129 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12130 record_buf[0] = reg_src1;
12131 thumb_insn_r->reg_rec_count = 1;
12132 }
12133 else if (opcode2 <= 2)
12134 {
12135 /* STR(2), STRB(2), STRH(2). */
12136 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12137 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12138 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12139 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12140 if (0 == opcode2)
12141 record_buf_mem[0] = 4; /* STR (2). */
12142 else if (2 == opcode2)
12143 record_buf_mem[0] = 1; /* STRB (2). */
12144 else if (1 == opcode2)
12145 record_buf_mem[0] = 2; /* STRH (2). */
12146 record_buf_mem[1] = u_regval[0] + u_regval[1];
12147 thumb_insn_r->mem_rec_count = 1;
12148 }
12149 }
12150 else if (bit (thumb_insn_r->arm_insn, 11))
12151 {
12152 /* Handle load from literal pool. */
12153 /* LDR(3). */
12154 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12155 record_buf[0] = reg_src1;
12156 thumb_insn_r->reg_rec_count = 1;
12157 }
12158 else if (opcode1)
12159 {
12160 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12161 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12162 if ((3 == opcode2) && (!opcode3))
12163 {
12164 /* Branch with exchange. */
12165 record_buf[0] = ARM_PS_REGNUM;
12166 thumb_insn_r->reg_rec_count = 1;
12167 }
12168 else
12169 {
12170 /* Format 8; special data processing insns. */
12171 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12172 record_buf[0] = ARM_PS_REGNUM;
12173 record_buf[1] = reg_src1;
12174 thumb_insn_r->reg_rec_count = 2;
12175 }
12176 }
12177 else
12178 {
12179 /* Format 5; data processing insns. */
12180 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12181 if (bit (thumb_insn_r->arm_insn, 7))
12182 {
12183 reg_src1 = reg_src1 + 8;
12184 }
12185 record_buf[0] = ARM_PS_REGNUM;
12186 record_buf[1] = reg_src1;
12187 thumb_insn_r->reg_rec_count = 2;
12188 }
12189
12190 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12191 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12192 record_buf_mem);
12193
12194 return 0;
12195 }
12196
12197 /* Handling opcode 011 insns. */
12198
12199 static int
12200 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12201 {
12202 struct regcache *reg_cache = thumb_insn_r->regcache;
12203 uint32_t record_buf[8], record_buf_mem[8];
12204
12205 uint32_t reg_src1 = 0;
12206 uint32_t opcode = 0, immed_5 = 0;
12207
12208 ULONGEST u_regval = 0;
12209
12210 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12211
12212 if (opcode)
12213 {
12214 /* LDR(1). */
12215 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12216 record_buf[0] = reg_src1;
12217 thumb_insn_r->reg_rec_count = 1;
12218 }
12219 else
12220 {
12221 /* STR(1). */
12222 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12223 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12224 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12225 record_buf_mem[0] = 4;
12226 record_buf_mem[1] = u_regval + (immed_5 * 4);
12227 thumb_insn_r->mem_rec_count = 1;
12228 }
12229
12230 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12231 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12232 record_buf_mem);
12233
12234 return 0;
12235 }
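
/* Worked example (illustrative only, register values assumed): the
   Thumb store "str r0, [r1, #12]" encodes immed_5 = 3, so with
   r1 = 0x4000 the record above gets a single {4, 0x400c} memory block,
   while the LDR(1) form only needs to record its destination
   register.  */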
12236
12237 /* Handling opcode 100 insns. */
12238
12239 static int
12240 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12241 {
12242 struct regcache *reg_cache = thumb_insn_r->regcache;
12243 uint32_t record_buf[8], record_buf_mem[8];
12244
12245 uint32_t reg_src1 = 0;
12246 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12247
12248 ULONGEST u_regval = 0;
12249
12250 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12251
12252 if (3 == opcode)
12253 {
12254 /* LDR(4). */
12255 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12256 record_buf[0] = reg_src1;
12257 thumb_insn_r->reg_rec_count = 1;
12258 }
12259 else if (1 == opcode)
12260 {
12261 /* LDRH(1). */
12262 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12263 record_buf[0] = reg_src1;
12264 thumb_insn_r->reg_rec_count = 1;
12265 }
12266 else if (2 == opcode)
12267 {
12268 /* STR(3). */
12269 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12270 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12271 record_buf_mem[0] = 4;
12272 record_buf_mem[1] = u_regval + (immed_8 * 4);
12273 thumb_insn_r->mem_rec_count = 1;
12274 }
12275 else if (0 == opcode)
12276 {
12277 /* STRH(1). */
12278 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12279 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12280 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12281 record_buf_mem[0] = 2;
12282 record_buf_mem[1] = u_regval + (immed_5 * 2);
12283 thumb_insn_r->mem_rec_count = 1;
12284 }
12285
12286 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12287 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12288 record_buf_mem);
12289
12290 return 0;
12291 }
12292
12293 /* Handling opcode 101 insns. */
12294
12295 static int
12296 thumb_record_misc (insn_decode_record *thumb_insn_r)
12297 {
12298 struct regcache *reg_cache = thumb_insn_r->regcache;
12299
12300 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12301 uint32_t register_bits = 0, register_count = 0;
12302 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12303 uint32_t record_buf[24], record_buf_mem[48];
12304 uint32_t reg_src1;
12305
12306 ULONGEST u_regval = 0;
12307
12308 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12309 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12310 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12311
12312 if (14 == opcode2)
12313 {
12314 /* POP. */
12315 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12316 while (register_bits)
12317 {
12318 if (register_bits & 0x00000001)
12319 record_buf[index++] = register_count;
12320 register_bits = register_bits >> 1;
12321 register_count++;
12322 }
12323 record_buf[index++] = ARM_PS_REGNUM;
12324 record_buf[index++] = ARM_SP_REGNUM;
12325 thumb_insn_r->reg_rec_count = index;
12326 }
12327 else if (10 == opcode2)
12328 {
12329 /* PUSH. */
12330 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12331 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12332 while (register_bits)
12333 {
12334 if (register_bits & 0x00000001)
12335 register_count++;
12336 register_bits = register_bits >> 1;
12337 }
12338 start_address = u_regval - \
12339 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12340 thumb_insn_r->mem_rec_count = register_count;
12341 while (register_count)
12342 {
12343 record_buf_mem[(register_count * 2) - 1] = start_address;
12344 record_buf_mem[(register_count * 2) - 2] = 4;
12345 start_address = start_address + 4;
12346 register_count--;
12347 }
12348 record_buf[0] = ARM_SP_REGNUM;
12349 thumb_insn_r->reg_rec_count = 1;
12350 }
12351 else if (0x1E == opcode1)
12352 {
12353 /* BKPT insn. */
12354 /* Handle enhanced software breakpoint insn, BKPT. */
12355 /* CPSR is changed so execution continues in ARM state, disabling
12356 normal interrupts and entering abort mode. */
12357 /* PC is set according to the high vector configuration. */
12358 /* If the user hits the breakpoint and types reverse, we need to go back
12359 with the previous CPSR and Program Counter. */
12360 record_buf[0] = ARM_PS_REGNUM;
12361 record_buf[1] = ARM_LR_REGNUM;
12362 thumb_insn_r->reg_rec_count = 2;
12363 /* We need to save SPSR value, which is not yet done. */
12364 printf_unfiltered (_("Process record does not support instruction "
12365 "0x%0x at address %s.\n"),
12366 thumb_insn_r->arm_insn,
12367 paddress (thumb_insn_r->gdbarch,
12368 thumb_insn_r->this_addr));
12369 return -1;
12370 }
12371 else if ((0 == opcode) || (1 == opcode))
12372 {
12373 /* ADD(5), ADD(6). */
12374 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12375 record_buf[0] = reg_src1;
12376 thumb_insn_r->reg_rec_count = 1;
12377 }
12378 else if (2 == opcode)
12379 {
12380 /* ADD(7), SUB(4). */
12381 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12382 record_buf[0] = ARM_SP_REGNUM;
12383 thumb_insn_r->reg_rec_count = 1;
12384 }
12385
12386 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12387 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12388 record_buf_mem);
12389
12390 return 0;
12391 }
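
/* Worked example (illustrative only, register values assumed): the
   Thumb insn "push {r0, r2}" with sp = 0x5000 is handled by the PUSH
   arm above: start_address = 0x5000 - 8, two {4, address} blocks are
   recorded at 0x4ff8 and 0x4ffc, and SP itself is recorded because the
   push decrements it.  */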
12392
12393 /* Handling opcode 110 insns. */
12394
12395 static int
12396 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12397 {
12398 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12399 struct regcache *reg_cache = thumb_insn_r->regcache;
12400
12401 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12402 uint32_t reg_src1 = 0;
12403 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12404 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12405 uint32_t record_buf[24], record_buf_mem[48];
12406
12407 ULONGEST u_regval = 0;
12408
12409 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12410 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12411
12412 if (1 == opcode2)
12413 {
12414
12415 /* LDMIA. */
12416 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12417 /* Get Rn. */
12418 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12419 while (register_bits)
12420 {
12421 if (register_bits & 0x00000001)
12422 record_buf[index++] = register_count;
12423 register_bits = register_bits >> 1;
12424 register_count++;
12425 }
12426 record_buf[index++] = reg_src1;
12427 thumb_insn_r->reg_rec_count = index;
12428 }
12429 else if (0 == opcode2)
12430 {
12431 /* It handles STMIA. */
12432 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12433 /* Get Rn. */
12434 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12435 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12436 while (register_bits)
12437 {
12438 if (register_bits & 0x00000001)
12439 register_count++;
12440 register_bits = register_bits >> 1;
12441 }
12442 start_address = u_regval;
12443 thumb_insn_r->mem_rec_count = register_count;
12444 while (register_count)
12445 {
12446 record_buf_mem[(register_count * 2) - 1] = start_address;
12447 record_buf_mem[(register_count * 2) - 2] = 4;
12448 start_address = start_address + 4;
12449 register_count--;
12450 }
12451 }
12452 else if (0x1F == opcode1)
12453 {
12454 /* Handle arm syscall insn. */
12455 if (tdep->arm_syscall_record != NULL)
12456 {
12457 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12458 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12459 }
12460 else
12461 {
12462 printf_unfiltered (_("no syscall record support\n"));
12463 return -1;
12464 }
12465 }
12466
12467 /* B (1), conditional branch is automatically taken care of in
12468 process_record, as PC is saved there. */
12469
12470 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12471 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12472 record_buf_mem);
12473
12474 return ret;
12475 }
12476
12477 /* Handling opcode 111 insns. */
12478
12479 static int
12480 thumb_record_branch (insn_decode_record *thumb_insn_r)
12481 {
12482 uint32_t record_buf[8];
12483 uint32_t bits_h = 0;
12484
12485 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12486
12487 if (2 == bits_h || 3 == bits_h)
12488 {
12489 /* BL */
12490 record_buf[0] = ARM_LR_REGNUM;
12491 thumb_insn_r->reg_rec_count = 1;
12492 }
12493 else if (1 == bits_h)
12494 {
12495 /* BLX(1). */
12496 record_buf[0] = ARM_PS_REGNUM;
12497 record_buf[1] = ARM_LR_REGNUM;
12498 thumb_insn_r->reg_rec_count = 2;
12499 }
12500
12501 /* B(2) is automatically taken care of in process_record, as PC is
12502 saved there. */
12503
12504 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12505
12506 return 0;
12507 }
12508
12509 /* Handler for thumb2 load/store multiple instructions. */
12510
12511 static int
12512 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12513 {
12514 struct regcache *reg_cache = thumb2_insn_r->regcache;
12515
12516 uint32_t reg_rn, op;
12517 uint32_t register_bits = 0, register_count = 0;
12518 uint32_t index = 0, start_address = 0;
12519 uint32_t record_buf[24], record_buf_mem[48];
12520
12521 ULONGEST u_regval = 0;
12522
12523 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12524 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12525
12526 if (0 == op || 3 == op)
12527 {
12528 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12529 {
12530 /* Handle RFE instruction. */
12531 record_buf[0] = ARM_PS_REGNUM;
12532 thumb2_insn_r->reg_rec_count = 1;
12533 }
12534 else
12535 {
12536 /* Handle SRS instruction after reading banked SP. */
12537 return arm_record_unsupported_insn (thumb2_insn_r);
12538 }
12539 }
12540 else if (1 == op || 2 == op)
12541 {
12542 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12543 {
12544 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12545 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12546 while (register_bits)
12547 {
12548 if (register_bits & 0x00000001)
12549 record_buf[index++] = register_count;
12550
12551 register_count++;
12552 register_bits = register_bits >> 1;
12553 }
12554 record_buf[index++] = reg_rn;
12555 record_buf[index++] = ARM_PS_REGNUM;
12556 thumb2_insn_r->reg_rec_count = index;
12557 }
12558 else
12559 {
12560 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12561 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12562 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12563 while (register_bits)
12564 {
12565 if (register_bits & 0x00000001)
12566 register_count++;
12567
12568 register_bits = register_bits >> 1;
12569 }
12570
12571 if (1 == op)
12572 {
12573 /* Start address calculation for STMIA/STMEA. */
12574 start_address = u_regval;
12575 }
12576 else if (2 == op)
12577 {
12578 /* Start address calculation for STMDB/STMFD. */
12579 start_address = u_regval - register_count * 4;
12580 }
12581
12582 thumb2_insn_r->mem_rec_count = register_count;
12583 while (register_count)
12584 {
12585 record_buf_mem[register_count * 2 - 1] = start_address;
12586 record_buf_mem[register_count * 2 - 2] = 4;
12587 start_address = start_address + 4;
12588 register_count--;
12589 }
12590 record_buf[0] = reg_rn;
12591 record_buf[1] = ARM_PS_REGNUM;
12592 thumb2_insn_r->reg_rec_count = 2;
12593 }
12594 }
12595
12596 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12597 record_buf_mem);
12598 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12599 record_buf);
12600 return ARM_RECORD_SUCCESS;
12601 }
12602
12603 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12604 instructions. */
12605
12606 static int
12607 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12608 {
12609 struct regcache *reg_cache = thumb2_insn_r->regcache;
12610
12611 uint32_t reg_rd, reg_rn, offset_imm;
12612 uint32_t reg_dest1, reg_dest2;
12613 uint32_t address, offset_addr;
12614 uint32_t record_buf[8], record_buf_mem[8];
12615 uint32_t op1, op2, op3;
12616 LONGEST s_word;
12617
12618 ULONGEST u_regval[2];
12619
12620 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12621 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12622 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
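/* op1, op2 and op3 together distinguish the exclusive accesses
   (LDREX/STREX and their byte, halfword and doubleword forms), the
   dual accesses LDRD/STRD, and the TBB/TBH table branches.  */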
12623
12624 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12625 {
12626 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12627 {
12628 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12629 record_buf[0] = reg_dest1;
12630 record_buf[1] = ARM_PS_REGNUM;
12631 thumb2_insn_r->reg_rec_count = 2;
12632 }
12633
12634 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12635 {
12636 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12637 record_buf[2] = reg_dest2;
12638 thumb2_insn_r->reg_rec_count = 3;
12639 }
12640 }
12641 else
12642 {
12643 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12644 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12645
12646 if (0 == op1 && 0 == op2)
12647 {
12648 /* Handle STREX. */
12649 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12650 address = u_regval[0] + (offset_imm * 4);
12651 record_buf_mem[0] = 4;
12652 record_buf_mem[1] = address;
12653 thumb2_insn_r->mem_rec_count = 1;
12654 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12655 record_buf[0] = reg_rd;
12656 thumb2_insn_r->reg_rec_count = 1;
12657 }
12658 else if (1 == op1 && 0 == op2)
12659 {
12660 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12661 record_buf[0] = reg_rd;
12662 thumb2_insn_r->reg_rec_count = 1;
12663 address = u_regval[0];
12664 record_buf_mem[1] = address;
12665
12666 if (4 == op3)
12667 {
12668 /* Handle STREXB. */
12669 record_buf_mem[0] = 1;
12670 thumb2_insn_r->mem_rec_count = 1;
12671 }
12672 else if (5 == op3)
12673 {
12674 /* Handle STREXH. */
12675 record_buf_mem[0] = 2;
12676 thumb2_insn_r->mem_rec_count = 1;
12677 }
12678 else if (7 == op3)
12679 {
12680 /* Handle STREXD. */
12681 address = u_regval[0];
12682 record_buf_mem[0] = 4;
12683 record_buf_mem[2] = 4;
12684 record_buf_mem[3] = address + 4;
12685 thumb2_insn_r->mem_rec_count = 2;
12686 }
12687 }
12688 else
12689 {
12690 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12691
12692 if (bit (thumb2_insn_r->arm_insn, 24))
12693 {
12694 if (bit (thumb2_insn_r->arm_insn, 23))
12695 offset_addr = u_regval[0] + (offset_imm * 4);
12696 else
12697 offset_addr = u_regval[0] - (offset_imm * 4);
12698
12699 address = offset_addr;
12700 }
12701 else
12702 address = u_regval[0];
12703
12704 record_buf_mem[0] = 4;
12705 record_buf_mem[1] = address;
12706 record_buf_mem[2] = 4;
12707 record_buf_mem[3] = address + 4;
12708 thumb2_insn_r->mem_rec_count = 2;
12709 record_buf[0] = reg_rn;
12710 thumb2_insn_r->reg_rec_count = 1;
12711 }
12712 }
12713
12714 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12715 record_buf);
12716 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12717 record_buf_mem);
12718 return ARM_RECORD_SUCCESS;
12719 }
12720
12721 /* Handler for thumb2 data processing (shift register and modified immediate)
12722 instructions. */
12723
12724 static int
12725 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12726 {
12727 uint32_t reg_rd, op;
12728 uint32_t record_buf[8];
12729
12730 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12731 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12732
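/* In the Thumb-2 encoding, opcodes 0, 4, 8 and 13 with Rd == 15 are
   TST, TEQ, CMN and CMP, which only update the flags; every other
   case writes the destination register as well.  */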
12733 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12734 {
12735 record_buf[0] = ARM_PS_REGNUM;
12736 thumb2_insn_r->reg_rec_count = 1;
12737 }
12738 else
12739 {
12740 record_buf[0] = reg_rd;
12741 record_buf[1] = ARM_PS_REGNUM;
12742 thumb2_insn_r->reg_rec_count = 2;
12743 }
12744
12745 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12746 record_buf);
12747 return ARM_RECORD_SUCCESS;
12748 }
12749
12750 /* Generic handler for thumb2 instructions which affect the destination and
12751 PS registers. */
12752
12753 static int
12754 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12755 {
12756 uint32_t reg_rd;
12757 uint32_t record_buf[8];
12758
12759 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12760
12761 record_buf[0] = reg_rd;
12762 record_buf[1] = ARM_PS_REGNUM;
12763 thumb2_insn_r->reg_rec_count = 2;
12764
12765 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12766 record_buf);
12767 return ARM_RECORD_SUCCESS;
12768 }
12769
12770 /* Handler for thumb2 branch and miscellaneous control instructions. */
12771
12772 static int
12773 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12774 {
12775 uint32_t op, op1, op2;
12776 uint32_t record_buf[8];
12777
12778 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12779 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12780 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
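/* op identifies MSR (0x38) below, while op1 separates the branch
   encodings from the miscellaneous control group; BL/BLX write LR
   (and BLX may toggle the CPSR T bit).  */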
12781
12782 /* Handle MSR insn. */
12783 if (!(op1 & 0x2) && 0x38 == op)
12784 {
12785 if (!(op2 & 0x3))
12786 {
12787 /* CPSR is going to be changed. */
12788 record_buf[0] = ARM_PS_REGNUM;
12789 thumb2_insn_r->reg_rec_count = 1;
12790 }
12791 else
12792 {
12793 arm_record_unsupported_insn (thumb2_insn_r);
12794 return -1;
12795 }
12796 }
12797 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12798 {
12799 /* BLX. */
12800 record_buf[0] = ARM_PS_REGNUM;
12801 record_buf[1] = ARM_LR_REGNUM;
12802 thumb2_insn_r->reg_rec_count = 2;
12803 }
12804
12805 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12806 record_buf);
12807 return ARM_RECORD_SUCCESS;
12808 }
12809
12810 /* Handler for thumb2 store single data item instructions. */
12811
12812 static int
12813 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12814 {
12815 struct regcache *reg_cache = thumb2_insn_r->regcache;
12816
12817 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12818 uint32_t address, offset_addr;
12819 uint32_t record_buf[8], record_buf_mem[8];
12820 uint32_t op1, op2;
12821
12822 ULONGEST u_regval[2];
12823
12824 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12825 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12826 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12827 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12828
12829 if (bit (thumb2_insn_r->arm_insn, 23))
12830 {
12831 /* T2 encoding. */
12832 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12833 offset_addr = u_regval[0] + offset_imm;
12834 address = offset_addr;
12835 }
12836 else
12837 {
12838 /* T3 encoding. */
12839 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12840 {
12841 /* Handle STRB (register). */
12842 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12843 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12844 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12845 offset_addr = u_regval[1] << shift_imm;
12846 address = u_regval[0] + offset_addr;
12847 }
12848 else
12849 {
12850 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12851 if (bit (thumb2_insn_r->arm_insn, 10))
12852 {
12853 if (bit (thumb2_insn_r->arm_insn, 9))
12854 offset_addr = u_regval[0] + offset_imm;
12855 else
12856 offset_addr = u_regval[0] - offset_imm;
12857
12858 address = offset_addr;
12859 }
12860 else
12861 address = u_regval[0];
12862 }
12863 }
12864
12865 switch (op1)
12866 {
12867 /* Store byte instructions. */
12868 case 4:
12869 case 0:
12870 record_buf_mem[0] = 1;
12871 break;
12872 /* Store half word instructions. */
12873 case 1:
12874 case 5:
12875 record_buf_mem[0] = 2;
12876 break;
12877 /* Store word instructions. */
12878 case 2:
12879 case 6:
12880 record_buf_mem[0] = 4;
12881 break;
12882
12883 default:
12884 gdb_assert_not_reached ("no decoding pattern found");
12885 break;
12886 }
12887
12888 record_buf_mem[1] = address;
12889 thumb2_insn_r->mem_rec_count = 1;
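/* The base register is recorded as well, since the indexed forms of
   the store may write it back.  */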
12890 record_buf[0] = reg_rn;
12891 thumb2_insn_r->reg_rec_count = 1;
12892
12893 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12894 record_buf);
12895 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12896 record_buf_mem);
12897 return ARM_RECORD_SUCCESS;
12898 }
12899
12900 /* Handler for thumb2 load memory hints instructions. */
12901
12902 static int
12903 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12904 {
12905 uint32_t record_buf[8];
12906 uint32_t reg_rt, reg_rn;
12907
12908 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12909 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12910
12911 if (ARM_PC_REGNUM != reg_rt)
12912 {
12913 record_buf[0] = reg_rt;
12914 record_buf[1] = reg_rn;
12915 record_buf[2] = ARM_PS_REGNUM;
12916 thumb2_insn_r->reg_rec_count = 3;
12917
12918 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12919 record_buf);
12920 return ARM_RECORD_SUCCESS;
12921 }
12922
12923 return ARM_RECORD_FAILURE;
12924 }
12925
12926 /* Handler for thumb2 load word instructions. */
12927
12928 static int
12929 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12930 {
12931 uint32_t opcode1 = 0, opcode2 = 0;
12932 uint32_t record_buf[8];
12933
12934 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12935 record_buf[1] = ARM_PS_REGNUM;
12936 thumb2_insn_r->reg_rec_count = 2;
12937
12938 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12939 record_buf);
12940 return ARM_RECORD_SUCCESS;
12941 }
12942
12943 /* Handler for thumb2 long multiply, long multiply accumulate, and
12944 divide instructions. */
12945
12946 static int
12947 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12948 {
12949 uint32_t opcode1 = 0, opcode2 = 0;
12950 uint32_t record_buf[8];
12951 uint32_t reg_src1 = 0;
12952
12953 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12954 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12955
12956 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12957 {
12958 /* Handle SMULL, UMULL, SMLAL and UMLAL, the long multiply and long
12959 multiply accumulate instructions, which write RdLo and RdHi. */
12960 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12961 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12962 record_buf[2] = ARM_PS_REGNUM;
12963 thumb2_insn_r->reg_rec_count = 3;
12964 }
12965 else if (1 == opcode1 || 3 == opcode1)
12966 {
12967 /* Handle SDIV and UDIV. */
12968 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12969 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12970 record_buf[2] = ARM_PS_REGNUM;
12971 thumb2_insn_r->reg_rec_count = 3;
12972 }
12973 else
12974 return ARM_RECORD_FAILURE;
12975
12976 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12977 record_buf);
12978 return ARM_RECORD_SUCCESS;
12979 }
12980
12981 /* Decodes thumb2 instruction type and invokes its record handler. */
12982
12983 static unsigned int
12984 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12985 {
12986 uint32_t op, op1, op2;
12987
12988 op = bit (thumb2_insn_r->arm_insn, 15);
12989 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12990 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
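/* op1 selects one of the three 32-bit Thumb encoding groups: 0x01
   covers load/store multiple, load/store dual/exclusive/table branch,
   shifted-register data processing and coprocessor insns; 0x02 covers
   the immediate data processing forms plus branches and miscellaneous
   control; 0x03 covers single loads/stores, Advanced SIMD, register
   data processing, multiplies, divides and coprocessor insns.  */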
12991
12992 if (op1 == 0x01)
12993 {
12994 if (!(op2 & 0x64))
12995 {
12996 /* Load/store multiple instruction. */
12997 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12998 }
12999 else if (!((op2 & 0x64) ^ 0x04))
13000 {
13001 /* Load/store (dual/exclusive) and table branch instruction. */
13002 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
13003 }
13004 else if (!((op2 & 0x20) ^ 0x20))
13005 {
13006 /* Data-processing (shifted register). */
13007 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13008 }
13009 else if (op2 & 0x40)
13010 {
13011 /* Co-processor instructions. */
13012 arm_record_unsupported_insn (thumb2_insn_r);
13013 }
13014 }
13015 else if (op1 == 0x02)
13016 {
13017 if (op)
13018 {
13019 /* Branches and miscellaneous control instructions. */
13020 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
13021 }
13022 else if (op2 & 0x20)
13023 {
13024 /* Data-processing (plain binary immediate) instruction. */
13025 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13026 }
13027 else
13028 {
13029 /* Data-processing (modified immediate). */
13030 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13031 }
13032 }
13033 else if (op1 == 0x03)
13034 {
13035 if (!(op2 & 0x71))
13036 {
13037 /* Store single data item. */
13038 return thumb2_record_str_single_data (thumb2_insn_r);
13039 }
13040 else if (!((op2 & 0x71) ^ 0x10))
13041 {
13042 /* Advanced SIMD or structure load/store instructions. */
13043 return arm_record_unsupported_insn (thumb2_insn_r);
13044 }
13045 else if (!((op2 & 0x67) ^ 0x01))
13046 {
13047 /* Load byte, memory hints instruction. */
13048 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13049 }
13050 else if (!((op2 & 0x67) ^ 0x03))
13051 {
13052 /* Load halfword, memory hints instruction. */
13053 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13054 }
13055 else if (!((op2 & 0x67) ^ 0x05))
13056 {
13057 /* Load word instruction. */
13058 return thumb2_record_ld_word (thumb2_insn_r);
13059 }
13060 else if (!((op2 & 0x70) ^ 0x20))
13061 {
13062 /* Data-processing (register) instruction. */
13063 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13064 }
13065 else if (!((op2 & 0x78) ^ 0x30))
13066 {
13067 /* Multiply, multiply accumulate, abs diff instruction. */
13068 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13069 }
13070 else if (!((op2 & 0x78) ^ 0x38))
13071 {
13072 /* Long multiply, long multiply accumulate, and divide. */
13073 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13074 }
13075 else if (op2 & 0x40)
13076 {
13077 /* Co-processor instructions. */
13078 return arm_record_unsupported_insn (thumb2_insn_r);
13079 }
13080 }
13081
13082 return -1;
13083 }
13084
13085 /* Extracts an arm/thumb/thumb2 insn depending on the size, and returns 0 on
13086 success and a positive value on failure. */
13087
13088 static int
13089 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
13090 {
13091 gdb_byte buf[insn_size];
13092
13093 memset (&buf[0], 0, insn_size);
13094
13095 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
13096 return 1;
13097 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13098 insn_size,
13099 gdbarch_byte_order (insn_record->gdbarch));
13100 return 0;
13101 }
13102
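/* Signature shared by all the per-class record handlers that the
   dispatch tables in decode_insn point at.  */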
13103 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13104
13105 /* Decode an arm/thumb insn depending on its condition codes and opcodes, and
13106 dispatch it to the matching record handler. */
13107
13108 static int
13109 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
13110 uint32_t insn_size)
13111 {
13112
13113 /* Bits 25, 26 and 27 (counting from 0) decode the type of arm instruction. */
13114 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13115 {
13116 arm_record_data_proc_misc_ld_str, /* 000. */
13117 arm_record_data_proc_imm, /* 001. */
13118 arm_record_ld_st_imm_offset, /* 010. */
13119 arm_record_ld_st_reg_offset, /* 011. */
13120 arm_record_ld_st_multiple, /* 100. */
13121 arm_record_b_bl, /* 101. */
13122 arm_record_unsupported_insn, /* 110. */
13123 arm_record_coproc_data_proc /* 111. */
13124 };
13125
13126 /* Bits 13, 14 and 15 (counting from 0) decode the type of thumb instruction. */
13127 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13128 {
13129 thumb_record_shift_add_sub, /* 000. */
13130 thumb_record_add_sub_cmp_mov, /* 001. */
13131 thumb_record_ld_st_reg_offset, /* 010. */
13132 thumb_record_ld_st_imm_offset, /* 011. */
13133 thumb_record_ld_st_stack, /* 100. */
13134 thumb_record_misc, /* 101. */
13135 thumb_record_ldm_stm_swi, /* 110. */
13136 thumb_record_branch /* 111. */
13137 };
13138
13139 uint32_t ret = 0; /* Return value: negative on failure, 0 on success. */
13140 uint32_t insn_id = 0;
13141
13142 if (extract_arm_insn (arm_record, insn_size))
13143 {
13144 if (record_debug)
13145 {
13146 printf_unfiltered (_("Process record: error reading memory at "
13147 "addr %s len = %d.\n"),
13148 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
13149 }
13150 return -1;
13151 }
13152 else if (ARM_RECORD == record_type)
13153 {
13154 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13155 insn_id = bits (arm_record->arm_insn, 25, 27);
13156 ret = arm_record_extension_space (arm_record);
13157 /* If this insn falls into the extension space it has already been
13158 recorded, so there is no need to decode it any further. */
13159 if (ret != -1 && !INSN_RECORDED (arm_record))
13160 {
13161 ret = arm_handle_insn[insn_id] (arm_record);
13162 }
13163 }
13164 else if (THUMB_RECORD == record_type)
13165 {
13166 /* Thumb does not have condition codes, so set cond to a negative value. */
13167 arm_record->cond = -1;
13168 insn_id = bits (arm_record->arm_insn, 13, 15);
13169 ret = thumb_handle_insn[insn_id] (arm_record);
13170 }
13171 else if (THUMB2_RECORD == record_type)
13172 {
13173 /* Thumb does not have condition codes, so set cond to a negative value. */
13174 arm_record->cond = -1;
13175
13176 /* Swap the two halfwords of the 32-bit thumb instruction so that the first halfword lands in the most significant bits, as the decoder expects. */
13177 arm_record->arm_insn
13178 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13179
13180 insn_id = thumb2_record_decode_insn_handler (arm_record);
13181
13182 if (insn_id != ARM_RECORD_SUCCESS)
13183 {
13184 arm_record_unsupported_insn (arm_record);
13185 ret = -1;
13186 }
13187 }
13188 else
13189 {
13190 /* Throw assertion. */
13191 gdb_assert_not_reached ("not a valid instruction, could not decode");
13192 }
13193
13194 return ret;
13195 }
13196
13197
13198 /* Cleans up local record registers and memory allocations. */
13199
13200 static void
13201 deallocate_reg_mem (insn_decode_record *record)
13202 {
13203 xfree (record->arm_regs);
13204 xfree (record->arm_mems);
13205 }
13206
13207
13208 /* Parse the current instruction and record the values of the registers and
13209 memory that will be changed by it to "record_arch_list".
13210 Return -1 if something is wrong. */
13211
13212 int
13213 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13214 CORE_ADDR insn_addr)
13215 {
13216
13217 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13218 uint32_t no_of_rec = 0;
13219 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13220 ULONGEST t_bit = 0, insn_id = 0;
13221
13222 ULONGEST u_regval = 0;
13223
13224 insn_decode_record arm_record;
13225
13226 memset (&arm_record, 0, sizeof (insn_decode_record));
13227 arm_record.regcache = regcache;
13228 arm_record.this_addr = insn_addr;
13229 arm_record.gdbarch = gdbarch;
13230
13231
13232 if (record_debug > 1)
13233 {
13234 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13235 "addr = %s\n",
13236 paddress (gdbarch, arm_record.this_addr));
13237 }
13238
13239 if (extract_arm_insn (&arm_record, 2))
13240 {
13241 if (record_debug)
13242 {
13243 printf_unfiltered (_("Process record: error reading memory at "
13244 "addr %s len = %d.\n"),
13245 paddress (arm_record.gdbarch,
13246 arm_record.this_addr), 2);
13247 }
13248 return -1;
13249 }
13250
13251 /* Check whether the insn is a thumb or an arm one. */
13252
13253 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13254 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
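/* The T bit of the CPSR indicates whether the processor is executing
   in ARM or Thumb state.  */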
13255
13256
13257 if (!(u_regval & t_bit))
13258 {
13259 /* We are decoding arm insn. */
13260 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13261 }
13262 else
13263 {
13264 insn_id = bits (arm_record.arm_insn, 11, 15);
13265 /* Is it a 32-bit thumb2 insn?  Its first halfword starts with 0b11101, 0b11110 or 0b11111 (0x1D, 0x1E or 0x1F). */
13266 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13267 {
13268 ret = decode_insn (&arm_record, THUMB2_RECORD,
13269 THUMB2_INSN_SIZE_BYTES);
13270 }
13271 else
13272 {
13273 /* We are decoding thumb insn. */
13274 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13275 }
13276 }
13277
13278 if (0 == ret)
13279 {
13280 /* Record registers. */
13281 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13282 if (arm_record.arm_regs)
13283 {
13284 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13285 {
13286 if (record_full_arch_list_add_reg
13287 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13288 ret = -1;
13289 }
13290 }
13291 /* Record memories. */
13292 if (arm_record.arm_mems)
13293 {
13294 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13295 {
13296 if (record_full_arch_list_add_mem
13297 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13298 arm_record.arm_mems[no_of_rec].len))
13299 ret = -1;
13300 }
13301 }
13302
13303 if (record_full_arch_list_add_end ())
13304 ret = -1;
13305 }
13306
13307
13308 deallocate_reg_mem (&arm_record);
13309
13310 return ret;
13311 }
13312