[ARM] "svc" insn check at irrelevant address in ARM unwind info sniffer
[binutils-gdb.git] / gdb / arm-tdep.c
/* Common target dependent code for GDB on ARM systems.

   Copyright (C) 1988-2015 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
#include "defs.h"

#include <ctype.h>		/* XXX for isupper ().  */

#include "frame.h"
#include "inferior.h"
#include "infrun.h"
#include "gdbcmd.h"
#include "gdbcore.h"
#include "dis-asm.h"		/* For register styles.  */
#include "regcache.h"
#include "reggroups.h"
#include "doublest.h"
#include "value.h"
#include "arch-utils.h"
#include "osabi.h"
#include "frame-unwind.h"
#include "frame-base.h"
#include "trad-frame.h"
#include "objfiles.h"
#include "dwarf2-frame.h"
#include "gdbtypes.h"
#include "prologue-value.h"
#include "remote.h"
#include "target-descriptions.h"
#include "user-regs.h"
#include "observer.h"

#include "arch/arm.h"
#include "arm-tdep.h"
#include "gdb/sim-arm.h"

#include "elf-bfd.h"
#include "coff/internal.h"
#include "elf/arm.h"

#include "vec.h"

#include "record.h"
#include "record-full.h"

#include "features/arm-with-m.c"
#include "features/arm-with-m-fpa-layout.c"
#include "features/arm-with-m-vfp-d16.c"
#include "features/arm-with-iwmmxt.c"
#include "features/arm-with-vfpv2.c"
#include "features/arm-with-vfpv3.c"
#include "features/arm-with-neon.c"
static int arm_debug;

/* Macros for setting and testing a bit in a minimal symbol that marks
   it as Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym) \
  MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym) \
  MSYMBOL_TARGET_FLAG_1 (msym)

/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

struct arm_mapping_symbol
{
  bfd_vma value;
  char type;
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};
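/* Note: the mapping symbols consulted through these structures are the
   ARM ELF mapping symbols "$a", "$t" and "$d", which mark the start of
   ARM code, Thumb code and data within a section; the TYPE field above
   holds the character following the '$' ('a', 't' or 'd').  */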
/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.  */
static const char *const fp_model_strings[] =
{
  "auto",
  "softfpa",
  "fpa",
  "softvfp",
  "vfp",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.  */
static const char *const arm_abi_strings[] =
{
  "auto",
  "APCS",
  "AAPCS",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  */
static const char *const arm_mode_strings[] =
{
  "auto",
  "arm",
  "thumb",
  NULL
};

static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";

/* Internal override of the execution mode.  -1 means no override,
   0 means override to ARM mode, 1 means override to Thumb mode.
   The effect is the same as if arm_force_mode has been set by the
   user (except the internal override has precedence over a user's
   arm_force_mode override).  */
static int arm_override_mode = -1;

/* Number of different reg name sets (options).  */
static int num_disassembly_options;

/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  */
static const struct
{
  const char *name;
  int regnum;
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  { "r0", 0 },
  { "r1", 1 },
  { "r2", 2 },
  { "r3", 3 },
  { "r4", 4 },
  { "r5", 5 },
  { "r6", 6 },
  { "r7", 7 },
  { "r8", 8 },
  { "r9", 9 },
  { "r10", 10 },
  { "r11", 11 },
  { "r12", 12 },
  { "r13", 13 },
  { "r14", 14 },
  { "r15", 15 },
  /* Synonyms (argument and variable registers).  */
  { "a1", 0 },
  { "a2", 1 },
  { "a3", 2 },
  { "a4", 3 },
  { "v1", 4 },
  { "v2", 5 },
  { "v3", 6 },
  { "v4", 7 },
  { "v5", 8 },
  { "v6", 9 },
  { "v7", 10 },
  { "v8", 11 },
  /* Other platform-specific names for r9.  */
  { "sb", 9 },
  { "tr", 9 },
  /* Special names.  */
  { "ip", 12 },
  { "lr", 14 },
  /* Names used by GCC (not listed in the ARM EABI).  */
  { "sl", 10 },
  /* A special name from the older ATPCS.  */
  { "wr", 7 },
};
static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */

/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc (char *, int,
					 struct cmd_list_element *);
static void set_disassembly_style (void);

static void convert_from_extended (const struct floatformat *, const void *,
				   void *, int);
static void convert_to_extended (const struct floatformat *, void *,
				 const void *, int);

static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						struct regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);
struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};

static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);

/* Architecture version for displaced stepping.  This affects the behaviour
   of certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION		5

/* Set to true if the 32-bit mode is in use.  */

int arm_apcs_32 = 1;
/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.  */

static ULONGEST
arm_psr_thumb_bit (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    return XPSR_T;
  else
    return CPSR_T;
}

/* Determine if FRAME is executing in Thumb mode.  */

int
arm_frame_is_thumb (struct frame_info *frame)
{
  CORE_ADDR cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));

  /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
     directly (from a signal frame or dummy frame) or by interpreting
     the saved LR (from a prologue or DWARF frame).  So consult it and
     trust the unwinders.  */
  cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}
/* Callback for VEC_lower_bound.  */

static inline int
arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
			     const struct arm_mapping_symbol *rhs)
{
  return lhs->value < rhs->value;
}

/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
					    0 };
      unsigned int idx;

      data = (struct arm_per_objfile *) objfile_data (sec->objfile,
						      arm_objfile_data_key);
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	    {
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		    {
		      if (start)
			*start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		  if (start)
		    *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;
		}
	    }
	}
    }

  return 0;
}
/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  struct displaced_step_closure *dsc
    = get_displaced_step_closure_by_addr (memaddr);

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* Respect internal mode override if active.  */
  if (arm_override_mode != -1)
    return arm_override_mode;

  /* If the user wants to override the symbol table, let them.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}
/* Remove useless bits from addresses in a running program.  */
static CORE_ADDR
arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
{
  /* On M-profile devices, do not strip the low bit from EXC_RETURN
     (the magic exception return address).  */
  if (gdbarch_tdep (gdbarch)->is_m
      && (val & 0xfffffff0) == 0xfffffff0)
    return val;

  if (arm_apcs_32)
    return UNMAKE_THUMB_ADDR (val);
  else
    return (val & 0x03fffffc);
}
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  return 0;
}
/* Support routines for instruction parsing.  */
#define submask(x) ((1L << ((x) + 1)) - 1)
#define bit(obj,st) (((obj) >> (st)) & 1)
#define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
#define sbits(obj,st,fn) \
  ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
#define BranchDest(addr,instr) \
  ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
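/* BranchDest adds 8 to the branch's own address because, in ARM state,
   the PC value observed by an instruction is that instruction's address
   plus 8; the signed 24-bit offset in B/BL is then scaled by 4.  */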
/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
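/* Illustrative example (not from the original source): the A1 encoding of
   "movw r0, #0x1234" is 0xe3010234; EXTRACT_MOVW_MOVT_IMM_A recombines
   imm4 (bits 16-19) and imm12 (bits 0-11) into 0x1234.  */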
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	return imm & 0xff;
      case 1:
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	return (imm & 0xff) | ((imm & 0xff) << 8)
	  | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  return (0x80 | (imm & 0x7f)) << (32 - count);
}
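/* Illustrative examples (not from the original source): an encoded value
   of 0x0ab has count 1, so the low byte is returned unchanged (0xab);
   0x1ab has count 3 and expands to 0x00ab00ab; 0x4ff has count 9, so
   (0x80 | 0x7f) is rotated into the high bits, giving 0x7f800000.  */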
/* Return 1 if the 16-bit Thumb instruction INST might change
   control flow, 0 otherwise.  */

static int
thumb_instruction_changes_pc (unsigned short inst)
{
  if ((inst & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
    return 1;

  if ((inst & 0xf000) == 0xd000)	/* conditional branch */
    return 1;

  if ((inst & 0xf800) == 0xe000)	/* unconditional branch */
    return 1;

  if ((inst & 0xff00) == 0x4700)	/* bx REG, blx REG */
    return 1;

  if ((inst & 0xff87) == 0x4687)	/* mov pc, REG */
    return 1;

  if ((inst & 0xf500) == 0xb100)	/* CBNZ or CBZ.  */
    return 1;

  return 0;
}
/* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
   might change control flow, 0 otherwise.  */

static int
thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
{
  if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
    {
      /* Branches and miscellaneous control instructions.  */

      if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	{
	  /* B, BL, BLX.  */
	  return 1;
	}
      else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	{
	  /* SUBS PC, LR, #imm8.  */
	  return 1;
	}
      else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	{
	  /* Conditional branch.  */
	  return 1;
	}

      return 0;
    }

  if ((inst1 & 0xfe50) == 0xe810)
    {
      /* Load multiple or RFE.  */

      if (bit (inst1, 7) && !bit (inst1, 8))
	{
	  /* LDMIA or POP */
	  if (bit (inst2, 15))
	    return 1;
	}
      else if (!bit (inst1, 7) && bit (inst1, 8))
	{
	  /* LDMDB */
	  if (bit (inst2, 15))
	    return 1;
	}
      else if (bit (inst1, 7) && bit (inst1, 8))
	{
	  /* RFEIA */
	  return 1;
	}
      else if (!bit (inst1, 7) && !bit (inst1, 8))
	{
	  /* RFEDB */
	  return 1;
	}

      return 0;
    }

  if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
    {
      /* MOV PC or MOVS PC.  */
      return 1;
    }

  if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
    {
      /* LDR PC.  */
      if (bits (inst1, 0, 3) == 15)
	return 1;
      if (bit (inst1, 7))
	return 1;
      if (bit (inst2, 11))
	return 1;
      if ((inst2 & 0x0fc0) == 0x0000)
	return 1;

      return 0;
    }

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
    {
      /* TBB.  */
      return 1;
    }

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
    {
      /* TBH.  */
      return 1;
    }

  return 0;
}
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  return (insn == 0x46bd		  /* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	  /* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00);  /* pop <registers> */
}
/* Analyze a Thumb prologue, looking for a recognizable stack frame
   and frame pointer.  Scan until we encounter a store that could
   clobber the stack frame unexpectedly, or an unknown instruction.
   Return the last address which is definitely safe to skip for an
   initial breakpoint.  */

static CORE_ADDR
thumb_analyze_prologue (struct gdbarch *gdbarch,
			CORE_ADDR start, CORE_ADDR limit,
			struct arm_prologue_cache *cache)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int i;
  pv_t regs[16];
  struct pv_area *stack;
  struct cleanup *back_to;
  CORE_ADDR offset;
  CORE_ADDR unrecognized_pc = 0;

  for (i = 0; i < 16; i++)
    regs[i] = pv_register (i, 0);
  stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
  back_to = make_cleanup_free_pv_area (stack);

  while (start < limit)
    {
      unsigned short insn;

      insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);

      if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */
	{
	  int regno;
	  int mask;

	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;

	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
	     whether to save LR (R14).  */
	  mask = (insn & 0xff) | ((insn & 0x100) << 6);
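	  /* For example (illustrative): "push {r4-r7, lr}" encodes as
	     0xb5f0, giving a mask of 0x40f0 -- bits 4-7 for r4-r7 plus
	     bit 14 for LR.  */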
	  /* Calculate offsets of saved R0-R7 and LR.  */
	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
	      {
		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						       -4);
		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
	      }
	}
      else if ((insn & 0xff80) == 0xb080)	/* sub sp, #imm */
	{
	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						 -offset);
	}
      else if (thumb_instruction_restores_sp (insn))
	{
	  /* Don't scan past the epilogue.  */
	  break;
	}
      else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
						    (insn & 0xff) << 2);
      else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
						   bits (insn, 6, 8));
      else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
						    bits (insn, 0, 7));
      else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
	       && pv_is_constant (regs[bits (insn, 3, 5)]))
	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
					  regs[bits (insn, 6, 8)]);
      else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
	       && pv_is_constant (regs[bits (insn, 3, 6)]))
	{
	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
	  int rm = bits (insn, 3, 6);
	  regs[rd] = pv_add (regs[rd], regs[rm]);
	}
      else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */
	{
	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
	  int src_reg = (insn & 0x78) >> 3;
	  regs[dst_reg] = regs[src_reg];
	}
      else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */
	{
	  /* Handle stores to the stack.  Normally pushes are used,
	     but with GCC -mtpcs-frame, there may be other stores
	     in the prologue to create the frame.  */
	  int regno = (insn >> 8) & 0x7;
	  pv_t addr;

	  offset = (insn & 0xff) << 2;
	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);

	  if (pv_area_store_would_trash (stack, addr))
	    break;

	  pv_area_store (stack, addr, 4, regs[regno]);
	}
      else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */
	{
	  int rd = bits (insn, 0, 2);
	  int rn = bits (insn, 3, 5);
	  pv_t addr;

	  offset = bits (insn, 6, 10) << 2;
	  addr = pv_add_constant (regs[rn], offset);

	  if (pv_area_store_would_trash (stack, addr))
	    break;

	  pv_area_store (stack, addr, 4, regs[rd]);
	}
      else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	/* Ignore stores of argument registers to the stack.  */
	;
      else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
	;
      else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [Rn, #immed] */
	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [sp, #immed] */
		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
	/* Similarly ignore single loads from the stack.  */
	;
      else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
	/* Skip register copies, i.e. saves to another register
	   instead of the stack.  */
	;
      else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
	/* Recognize constant loads; even with small stacks these are necessary
	   on Thumb.  */
	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
      else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */
	{
	  /* Constant pool loads, for the same reason.  */
	  unsigned int constant;
	  CORE_ADDR loc;

	  loc = start + 4 + bits (insn, 0, 7) * 4;
	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
	  regs[bits (insn, 8, 10)] = pv_constant (constant);
	}
      else if (thumb_insn_size (insn) == 4)	/* 32-bit Thumb-2 instructions.  */
	{
	  unsigned short inst2;

	  inst2 = read_memory_unsigned_integer (start + 2, 2,
						byte_order_for_code);

	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
	    {
	      /* BL, BLX.  Allow some special function calls when
		 skipping the prologue; GCC generates these before
		 storing arguments to the stack.  */
	      CORE_ADDR nextpc;
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (insn, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = start + 4 + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;

	      if (!skip_prologue_function (gdbarch, nextpc,
					   bit (inst2, 12) != 0))
		break;
	    }

	  else if ((insn & 0xffd0) == 0xe900	/* stmdb Rn{!},
						   { registers } */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      pv_t addr = regs[bits (insn, 0, 3)];
	      int regno;

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      /* Calculate offsets of saved registers.  */
	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
		if (inst2 & (1 << regno))
		  {
		    addr = pv_add_constant (addr, -4);
		    pv_area_store (stack, addr, 4, regs[regno]);
		  }

	      if (insn & 0x0020)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2,
						   [Rn, #+/-imm]{!} */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno1 = bits (inst2, 12, 15);
	      int regno2 = bits (inst2, 8, 11);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
	      if (insn & 0x0080)
		addr = pv_add_constant (addr, offset);
	      else
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      pv_area_store (stack, addr, 4, regs[regno1]);
	      pv_area_store (stack, pv_add_constant (addr, 4),
			     4, regs[regno2]);

	      if (insn & 0x0020)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
		   && (inst2 & 0x0c00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno = bits (inst2, 12, 15);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
	      if (inst2 & 0x0200)
		addr = pv_add_constant (addr, offset);
	      else
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      pv_area_store (stack, addr, 4, regs[regno]);

	      if (inst2 & 0x0100)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno = bits (inst2, 12, 15);
	      pv_t addr;

	      offset = inst2 & 0xfff;
	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      pv_area_store (stack, addr, 4, regs[regno]);
	    }

	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	    ;

	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	    ;

	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!],
						   { registers } */
		   && (inst2 & 0x8000) == 0x0000
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore block loads from the stack, potentially copying
	       parameters from memory.  */
	    ;

	  else if ((insn & 0xffb0) == 0xe950	/* ldrd Rt, Rt2,
						   [Rn, #+/-imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore dual loads from the stack.  */
	    ;

	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	    ;

	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	    ;
	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], imm);
	    }

	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   - (CORE_ADDR) thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
	    }

	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_constant (thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	    {
	      unsigned int imm
		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);

	      regs[bits (inst2, 8, 11)] = pv_constant (imm);
	    }

	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
		   && (inst2 & 0xf0f0) == 0)
	    {
	      int dst_reg = (inst2 & 0x0f00) >> 8;
	      int src_reg = inst2 & 0xf;
	      regs[dst_reg] = regs[src_reg];
	    }

	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */
	    {
	      /* Constant pool loads.  */
	      unsigned int constant;
	      CORE_ADDR loc;

	      offset = bits (inst2, 0, 11);
	      if (insn & 0x0080)
		loc = start + 4 + offset;
	      else
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
	    }

	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */
	    {
	      /* Constant pool loads.  */
	      unsigned int constant;
	      CORE_ADDR loc;

	      offset = bits (inst2, 0, 7) << 2;
	      if (insn & 0x0080)
		loc = start + 4 + offset;
	      else
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);

	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
	      regs[bits (inst2, 8, 11)] = pv_constant (constant);
	    }

	  else if (thumb2_instruction_changes_pc (insn, inst2))
	    {
	      /* Don't scan past anything that might change control flow.  */
	      break;
	    }
	  else
	    {
	      /* The optimizer might shove anything into the prologue,
		 so we just skip what we don't recognize.  */
	      unrecognized_pc = start;
	    }

	  start += 2;
	}
      else if (thumb_instruction_changes_pc (insn))
	{
	  /* Don't scan past anything that might change control flow.  */
	  break;
	}
      else
	{
	  /* The optimizer might shove anything into the prologue,
	     so we just skip what we don't recognize.  */
	  unrecognized_pc = start;
	}

      start += 2;
    }
  if (arm_debug)
    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, start));

  if (unrecognized_pc == 0)
    unrecognized_pc = start;

  if (cache == NULL)
    {
      do_cleanups (back_to);
      return unrecognized_pc;
    }

  if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
    {
      /* Frame pointer is fp.  Frame size is constant.  */
      cache->framereg = ARM_FP_REGNUM;
      cache->framesize = -regs[ARM_FP_REGNUM].k;
    }
  else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
    {
      /* Frame pointer is r7.  Frame size is constant.  */
      cache->framereg = THUMB_FP_REGNUM;
      cache->framesize = -regs[THUMB_FP_REGNUM].k;
    }
  else
    {
      /* Try the stack pointer... this is a bit desperate.  */
      cache->framereg = ARM_SP_REGNUM;
      cache->framesize = -regs[ARM_SP_REGNUM].k;
    }

  for (i = 0; i < 16; i++)
    if (pv_area_find_reg (stack, gdbarch, i, &offset))
      cache->saved_regs[i].addr = offset;

  do_cleanups (back_to);
  return unrecognized_pc;
}
/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  Return the address of the instruction after loading
   this symbol, set the destination register number to *DESTREG, and set
   the size in bytes of the loading sequence in *OFFSET.  Return 0 if the
   instructions are not recognized.  */

static CORE_ADDR
arm_analyze_load_stack_chk_guard (CORE_ADDR pc, struct gdbarch *gdbarch,
				  unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800)	/* ldr Rd, #immed */
	{
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  insn1
	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000)	/* ldr Rd, [PC, #immed] */
	{
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000)	/* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}
/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   the first instruction after this sequence, otherwise, return the original
   PC.

   On ARM, this sequence of instructions is composed of mainly three steps,
     Step 1: load symbol __stack_chk_guard,
     Step 2: load from address of __stack_chk_guard,
     Step 3: store it to somewhere else.

   Usually, the instructions on step 2 and step 3 are the same on various ARM
   architectures.  On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
   on step 3, it is also one instruction 'str Rx, [r7, #immd]'.  However,
   the instructions in step 1 vary between ARM architectures.  On ARMv7,
   they are,

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

   On ARMv5t, it is,

	ldr	Rn, .Label
	....
	.Label:
	.word	__stack_chk_guard

   Since ldr/str is a very popular instruction, we can't use them as
   'fingerprint' or 'signature' of the stack protector sequence.  Here we
   choose the sequence {movw/movt, ldr}/ldr/str plus the symbol
   __stack_chk_guard, if not stripped, as the 'fingerprint' of a stack
   protector code sequence.  */

static CORE_ADDR
arm_skip_stack_protector (CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
		      "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_memory_unsigned_integer (pc + offset + 2, 2,
					   byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_memory_unsigned_integer (pc + offset + 4,
					   4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }

  /* The total size of the two ldr/str instructions is 4 on Thumb-2, while 8
     on ARM.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
/* Advance the PC across any function entry prologue instructions to
   reach some "real" code.

   The APCS (ARM Procedure Call Standard) defines the following
   prologue:

   mov		ip, sp
   [stmfd	sp!, {a1,a2,a3,a4}]
   stmfd	sp!, {...,fp,ip,lr,pc}
   [stfe	f7, [sp, #-12]!]
   [stfe	f6, [sp, #-12]!]
   [stfe	f5, [sp, #-12]!]
   [stfe	f4, [sp, #-12]!]
   sub		fp, ip, #nn @@ nn == 20 or 4 depending on second insn.  */

static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long inst;
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);

      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	  && (cust == NULL
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
	      || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;		/* Magic.  */

  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
  else
    return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
}
/* *INDENT-OFF* */
/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
   This function decodes a Thumb function prologue to determine:
     1) the size of the stack frame
     2) which registers are saved on it
     3) the offsets of saved regs
     4) the offset from the stack pointer to the frame pointer

   A typical Thumb function prologue would create this stack frame
   (offsets relative to FP)
     old SP ->	24  stack parameters
		20  LR
		16  R7
     R7 ->	 0  local variables (16 bytes)
     SP ->     -12  additional stack space (12 bytes)
   The frame size would thus be 36 bytes, and the frame offset would be
   12 bytes.  The frame register is R7.

   The comments for thumb_skip_prolog() describe the algorithm we use
   to detect the end of the prolog.  */
/* *INDENT-ON* */

static void
thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
		     CORE_ADDR block_addr, struct arm_prologue_cache *cache)
{
  CORE_ADDR prologue_start;
  CORE_ADDR prologue_end;

  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* See comment in arm_scan_prologue for an explanation of
	 this heuristic.  */
      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;
	}
    }
  else
    /* We're in the boondocks: we have no idea where the start of the
       function is.  */
    return;

  prologue_end = min (prologue_end, prev_pc);

  thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}
/* Return 1 if THIS_INSTR might change control flow, 0 otherwise.  */

static int
arm_instruction_changes_pc (uint32_t this_instr)
{
  if (bits (this_instr, 28, 31) == INST_NV)
    /* Unconditional instructions.  */
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	/* Branch with Link and change to Thumb.  */
	return 1;
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	return 0;
      default:
	return 0;
      }
  else
    switch (bits (this_instr, 25, 27))
      {
      case 0x0:
	if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
	  {
	    /* Multiplies and extra load/stores.  */
	    if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
	      /* Neither multiplies nor extension load/stores are allowed
		 to modify PC.  */
	      return 0;

	    /* Otherwise, miscellaneous instructions.  */

	    /* BX <reg>, BXJ <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff2
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      return 1;

	    /* Other miscellaneous instructions are unpredictable if they
	       modify PC.  */
	    return 0;
	  }
	/* Data processing instruction.  Fall through.  */

      case 0x1:
	if (bits (this_instr, 12, 15) == 15)
	  return 1;
	else
	  return 0;

      case 0x2:
      case 0x3:
	/* Media instructions and architecturally undefined instructions.  */
	if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
	  return 0;

	/* Stores.  */
	if (bit (this_instr, 20) == 0)
	  return 0;

	/* Loads.  */
	if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
	  return 1;
	else
	  return 0;

      case 0x4:
	/* Load/store multiple.  */
	if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
	  return 1;
	else
	  return 0;

      case 0x5:
	/* Branch and branch with link.  */
	return 1;

      case 0x6:
      case 0x7:
	/* Coprocessor transfers or SWIs can not affect PC.  */
	return 0;

      default:
	internal_error (__FILE__, __LINE__, _("bad value in switch"));
      }
}
/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
   otherwise.  */

static int
arm_instruction_restores_sp (unsigned int insn)
{
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0df0f000) == 0x0080d000
	  /* ADD SP (register or immediate).  */
	  || (insn & 0x0df0f000) == 0x0040d000
	  /* SUB SP (register or immediate).  */
	  || (insn & 0x0ffffff0) == 0x01a0d000
	  /* MOV SP.  */
	  || (insn & 0x0fff0000) == 0x08bd0000
	  /* POP (LDMIA).  */
	  || (insn & 0x0fff0000) == 0x049d0000)
	  /* POP of a single register.  */
	return 1;
    }

  return 0;
}
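/* For example (illustrative): the APCS epilogue instruction
   "ldmfd sp!, {fp, sp, pc}" assembles to 0xe8bda800, which matches the
   POP (LDMIA) pattern tested above.  */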
/* Analyze an ARM mode prologue starting at PROLOGUE_START and
   continuing no further than PROLOGUE_END.  If CACHE is non-NULL,
   fill it in.  Return the first address not recognized as a prologue
   instruction.

   We recognize all the instructions typically found in ARM prologues,
   plus harmless instructions which can be skipped (either for analysis
   purposes, or a more restrictive set that can be skipped when finding
   the end of the prologue).  */

static CORE_ADDR
arm_analyze_prologue (struct gdbarch *gdbarch,
		      CORE_ADDR prologue_start, CORE_ADDR prologue_end,
		      struct arm_prologue_cache *cache)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int regno;
  CORE_ADDR offset, current_pc;
  pv_t regs[ARM_FPS_REGNUM];
  struct pv_area *stack;
  struct cleanup *back_to;
  CORE_ADDR unrecognized_pc = 0;

  /* Search the prologue looking for instructions that set up the
     frame pointer, adjust the stack pointer, and save registers.

     Be careful, however, and if it doesn't look like a prologue,
     don't try to scan it.  If, for instance, a frameless function
     begins with stmfd sp!, then we will tell ourselves there is
     a frame, which will confuse stack traceback, as well as "finish"
     and other operations that rely on a knowledge of the stack
     traceback.  */

  for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
    regs[regno] = pv_register (regno, 0);
  stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
  back_to = make_cleanup_free_pv_area (stack);

  for (current_pc = prologue_start;
       current_pc < prologue_end;
       current_pc += 4)
    {
      unsigned int insn
	= read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);

      if (insn == 0xe1a0c00d)		/* mov ip, sp */
	{
	  regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe2800000	/* add Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe2400000	/* sub Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
	  continue;
	}
      else if ((insn & 0xffff0fff) == 0xe52d0004)	/* str Rd,
							   [sp, #-4]! */
	{
	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
	  pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
			 regs[bits (insn, 12, 15)]);
	  continue;
	}
      else if ((insn & 0xffff0000) == 0xe92d0000)
	/* stmfd sp!, {..., fp, ip, lr, pc}

	   stmfd sp!, {a1, a2, a3, a4}  */
	{
	  int mask = insn & 0xffff;

	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;

	  /* Calculate offsets of saved registers.  */
	  for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
	      {
		regs[ARM_SP_REGNUM]
		  = pv_add_constant (regs[ARM_SP_REGNUM], -4);
		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
	      }
	}
      else if ((insn & 0xffff0000) == 0xe54b0000	/* strb rx,[r11,#-n] */
	       || (insn & 0xffff00f0) == 0xe14b00b0	/* strh rx,[r11,#-n] */
	       || (insn & 0xffffc000) == 0xe50b0000)	/* str rx,[r11,#-n] */
	{
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
	  continue;
	}
      else if ((insn & 0xffff0000) == 0xe5cd0000	/* strb rx,[sp,#n] */
	       || (insn & 0xffff00f0) == 0xe1cd00b0	/* strh rx,[sp,#n] */
	       || (insn & 0xffffc000) == 0xe58d0000)	/* str rx,[sp,#n] */
	{
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe8800000	/* stm Rn,
							   { registers } */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  /* No need to add this to saved_regs -- it's just arg regs.  */
	  continue;
	}
      else if ((insn & 0xfffff000) == 0xe24cb000)	/* sub fp, ip #n */
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
	}
      else if ((insn & 0xfffff000) == 0xe24dd000)	/* sub sp, sp #n */
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
	}
      else if ((insn & 0xffff7fff) == 0xed6d0103	/* stfe f?,
							   [sp, -#c]! */
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	{
	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;

	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	  regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
	  pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
	}
      else if ((insn & 0xffbf0fff) == 0xec2d0200	/* sfmfd f0, 4,
							   [sp!] */
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	{
	  int n_saved_fp_regs;
	  unsigned int fp_start_reg, fp_bound_reg;

	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;

	  if ((insn & 0x800) == 0x800)		/* N0 is set */
	    {
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 3;
	      else
		n_saved_fp_regs = 1;
	    }
	  else
	    {
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 2;
	      else
		n_saved_fp_regs = 4;
	    }

	  fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
	  fp_bound_reg = fp_start_reg + n_saved_fp_regs;
	  for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
	    {
	      regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	      pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
			     regs[fp_start_reg++]);
	    }
	}
      else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
	{
	  /* Allow some special function calls when skipping the
	     prologue; GCC generates these before storing arguments to
	     the stack.  */
	  CORE_ADDR dest = BranchDest (current_pc, insn);

	  if (skip_prologue_function (gdbarch, dest, 0))
	    continue;
	  else
	    break;
	}
      else if ((insn & 0xf0000000) != 0xe0000000)
	break;			/* Condition not true, exit early.  */
      else if (arm_instruction_changes_pc (insn))
	/* Don't scan past anything that might change control flow.  */
	break;
      else if (arm_instruction_restores_sp (insn))
	{
	  /* Don't scan past the epilogue.  */
	  break;
	}
      else if ((insn & 0xfe500000) == 0xe8100000	/* ldm */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
	continue;
      else if ((insn & 0xfc500000) == 0xe4100000
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Similarly ignore single loads from the stack.  */
	continue;
      else if ((insn & 0xffff0ff0) == 0xe1a00000)
	/* MOV Rd, Rm.  Skip register copies, i.e. saves to another
	   register instead of the stack.  */
	continue;
      else
	{
	  /* The optimizer might shove anything into the prologue.  If
	     we are building up the cache (cache != NULL) from scanning
	     the prologue, we just skip what we don't recognize and
	     scan further to make the cache as complete as possible.
	     However, if we are skipping the prologue, we'll stop
	     immediately on an unrecognized instruction.  */
	  unrecognized_pc = current_pc;
	  if (cache != NULL)
	    continue;
	  else
	    break;
	}
    }

  if (unrecognized_pc == 0)
    unrecognized_pc = current_pc;

  if (cache)
    {
      int framereg, framesize;

      /* The frame size is just the distance from the frame register
	 to the original stack pointer.  */
      if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
	{
	  /* Frame pointer is fp.  */
	  framereg = ARM_FP_REGNUM;
	  framesize = -regs[ARM_FP_REGNUM].k;
	}
      else
	{
	  /* Try the stack pointer... this is a bit desperate.  */
	  framereg = ARM_SP_REGNUM;
	  framesize = -regs[ARM_SP_REGNUM].k;
	}

      cache->framereg = framereg;
      cache->framesize = framesize;

      for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
	if (pv_area_find_reg (stack, gdbarch, regno, &offset))
	  cache->saved_regs[regno].addr = offset;
    }

  if (arm_debug)
    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, unrecognized_pc));

  do_cleanups (back_to);
  return unrecognized_pc;
}
1892 static void
1893 arm_scan_prologue (struct frame_info *this_frame,
1894 struct arm_prologue_cache *cache)
1896 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1897 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1898 int regno;
1899 CORE_ADDR prologue_start, prologue_end, current_pc;
1900 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1901 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1902 pv_t regs[ARM_FPS_REGNUM];
1903 struct pv_area *stack;
1904 struct cleanup *back_to;
1905 CORE_ADDR offset;
1907 /* Assume there is no frame until proven otherwise. */
1908 cache->framereg = ARM_SP_REGNUM;
1909 cache->framesize = 0;
1911 /* Check for Thumb prologue. */
1912 if (arm_frame_is_thumb (this_frame))
1914 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1915 return;
1918 /* Find the function prologue. If we can't find the function in
1919 the symbol table, peek in the stack frame to find the PC. */
1920 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1921 &prologue_end))
1923 /* One way to find the end of the prologue (which works well
1924 for unoptimized code) is to do the following:
1926 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1928 if (sal.line == 0)
1929 prologue_end = prev_pc;
1930 else if (sal.end < prologue_end)
1931 prologue_end = sal.end;
1933 This mechanism is very accurate so long as the optimizer
1934 doesn't move any instructions from the function body into the
1935 prologue. If this happens, sal.end will be the last
1936 instruction in the first hunk of prologue code just before
1937 the first instruction that the scheduler has moved from
1938 the body to the prologue.
1940 In order to make sure that we scan all of the prologue
1941 instructions, we use a slightly less accurate mechanism which
1942 may scan more than necessary. To help compensate for this
1943 lack of accuracy, the prologue scanning loop below contains
1944 several clauses which will cause the loop to terminate early if
1945 an implausible prologue instruction is encountered.
1947 The expression
1949 prologue_start + 64
1951 is a suitable endpoint since it accounts for the largest
1952 possible prologue plus up to five instructions inserted by
1953 the scheduler. */
1955 if (prologue_end > prologue_start + 64)
1957 prologue_end = prologue_start + 64; /* See above. */
1960 else
1962 /* We have no symbol information. Our only option is to assume this
1963 function has a standard stack frame and the normal frame register.
1964 Then, we can find the value of our frame pointer on entrance to
1965 the callee (or at the present moment if this is the innermost frame).
1966 The value stored there should be the address of the stmfd + 8. */
1967 CORE_ADDR frame_loc;
1968 LONGEST return_value;
1970 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1971 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1972 return;
1973 else
1975 prologue_start = gdbarch_addr_bits_remove
1976 (gdbarch, return_value) - 8;
1977 prologue_end = prologue_start + 64; /* See above. */
1981 if (prev_pc < prologue_end)
1982 prologue_end = prev_pc;
1984 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
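/* Allocate a prologue cache for THIS_FRAME, scan its prologue, and use
   the resulting frame register and frame size to reconstruct the
   previous stack pointer and the absolute addresses of the saved
   registers.  */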
1987 static struct arm_prologue_cache *
1988 arm_make_prologue_cache (struct frame_info *this_frame)
1990 int reg;
1991 struct arm_prologue_cache *cache;
1992 CORE_ADDR unwound_fp;
1994 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1995 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1997 arm_scan_prologue (this_frame, cache);
1999 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2000 if (unwound_fp == 0)
2001 return cache;
2003 cache->prev_sp = unwound_fp + cache->framesize;
2005 /* Calculate actual addresses of saved registers using offsets
2006 determined by arm_scan_prologue. */
2007 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2008 if (trad_frame_addr_p (cache->saved_regs, reg))
2009 cache->saved_regs[reg].addr += cache->prev_sp;
2011 return cache;
2014 /* Implementation of the stop_reason hook for arm_prologue frames. */
2016 static enum unwind_stop_reason
2017 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
2018 void **this_cache)
2020 struct arm_prologue_cache *cache;
2021 CORE_ADDR pc;
2023 if (*this_cache == NULL)
2024 *this_cache = arm_make_prologue_cache (this_frame);
2025 cache = (struct arm_prologue_cache *) *this_cache;
2027 /* This is meant to halt the backtrace at "_start". */
2028 pc = get_frame_pc (this_frame);
2029 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2030 return UNWIND_OUTERMOST;
2032 /* If we've hit a wall, stop. */
2033 if (cache->prev_sp == 0)
2034 return UNWIND_OUTERMOST;
2036 return UNWIND_NO_REASON;
2039 /* Our frame ID for a normal frame is the current function's starting PC
2040 and the caller's SP when we were called. */
2042 static void
2043 arm_prologue_this_id (struct frame_info *this_frame,
2044 void **this_cache,
2045 struct frame_id *this_id)
2047 struct arm_prologue_cache *cache;
2048 struct frame_id id;
2049 CORE_ADDR pc, func;
2051 if (*this_cache == NULL)
2052 *this_cache = arm_make_prologue_cache (this_frame);
2053 cache = (struct arm_prologue_cache *) *this_cache;
2055 /* Use function start address as part of the frame ID. If we cannot
2056 identify the start address (due to missing symbol information),
2057 fall back to just using the current PC. */
2058 pc = get_frame_pc (this_frame);
2059 func = get_frame_func (this_frame);
2060 if (!func)
2061 func = pc;
2063 id = frame_id_build (cache->prev_sp, func);
2064 *this_id = id;
2067 static struct value *
2068 arm_prologue_prev_register (struct frame_info *this_frame,
2069 void **this_cache,
2070 int prev_regnum)
2072 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2073 struct arm_prologue_cache *cache;
2075 if (*this_cache == NULL)
2076 *this_cache = arm_make_prologue_cache (this_frame);
2077 cache = (struct arm_prologue_cache *) *this_cache;
2079 /* If we are asked to unwind the PC, then we need to return the LR
2080 instead. The prologue may save PC, but it will point into this
2081 frame's prologue, not the next frame's resume location. Also
2082 strip the saved T bit. A valid LR may have the low bit set, but
2083 a valid PC never does. */
2084 if (prev_regnum == ARM_PC_REGNUM)
2086 CORE_ADDR lr;
2088 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2089 return frame_unwind_got_constant (this_frame, prev_regnum,
2090 arm_addr_bits_remove (gdbarch, lr));
2093 /* SP is generally not saved to the stack, but this frame is
2094 identified by the next frame's stack pointer at the time of the call.
2095 The value was already reconstructed into PREV_SP. */
2096 if (prev_regnum == ARM_SP_REGNUM)
2097 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2099 /* The CPSR may have been changed by the call instruction and by the
2100 called function. The only bit we can reconstruct is the T bit,
2101 by checking the low bit of LR as of the call. This is a reliable
2102 indicator of Thumb-ness except for some ARM v4T pre-interworking
2103 Thumb code, which could get away with a clear low bit as long as
2104 the called function did not use bx. Guess that all other
2105 bits are unchanged; the condition flags are presumably lost,
2106 but the processor status is likely valid. */
2107 if (prev_regnum == ARM_PS_REGNUM)
2109 CORE_ADDR lr, cpsr;
2110 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2112 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2113 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2114 if (IS_THUMB_ADDR (lr))
2115 cpsr |= t_bit;
2116 else
2117 cpsr &= ~t_bit;
2118 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2121 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2122 prev_regnum);
2125 struct frame_unwind arm_prologue_unwind = {
2126 NORMAL_FRAME,
2127 arm_prologue_unwind_stop_reason,
2128 arm_prologue_this_id,
2129 arm_prologue_prev_register,
2130 NULL,
2131 default_frame_sniffer
2134 /* Maintain a list of ARM exception table entries per objfile, similar to the
2135 list of mapping symbols. We only cache entries for standard ARM-defined
2136 personality routines; the cache will contain only the frame unwinding
2137 instructions associated with the entry (not the descriptors). */
2139 static const struct objfile_data *arm_exidx_data_key;
2141 struct arm_exidx_entry
2143 bfd_vma addr;
2144 gdb_byte *entry;
2146 typedef struct arm_exidx_entry arm_exidx_entry_s;
2147 DEF_VEC_O(arm_exidx_entry_s);
2149 struct arm_exidx_data
2151 VEC(arm_exidx_entry_s) **section_maps;
2154 static void
2155 arm_exidx_data_free (struct objfile *objfile, void *arg)
2157 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
2158 unsigned int i;
2160 for (i = 0; i < objfile->obfd->section_count; i++)
2161 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
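/* Ordering predicate for exception table entries, used below with
   VEC_lower_bound; entries are kept sorted by start address.  */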
2164 static inline int
2165 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2166 const struct arm_exidx_entry *rhs)
2168 return lhs->addr < rhs->addr;
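/* Return the allocated (SEC_ALLOC) section of OBJFILE that contains
   address VMA, or NULL if no such section exists.  */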
2171 static struct obj_section *
2172 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2174 struct obj_section *osect;
2176 ALL_OBJFILE_OSECTIONS (objfile, osect)
2177 if (bfd_get_section_flags (objfile->obfd,
2178 osect->the_bfd_section) & SEC_ALLOC)
2180 bfd_vma start, size;
2181 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2182 size = bfd_get_section_size (osect->the_bfd_section);
2184 if (start <= vma && vma < start + size)
2185 return osect;
2188 return NULL;
2191 /* Parse contents of exception table and exception index sections
2192 of OBJFILE, and fill in the exception table entry cache.
2194 For each entry that refers to a standard ARM-defined personality
2195 routine, extract the frame unwinding instructions (from either
2196 the index or the table section). The unwinding instructions
2197 are normalized by:
2198 - extracting them from the rest of the table data
2199 - converting to host endianness
2200 - appending the implicit 0xb0 ("Finish") code
2202 The extracted and normalized instructions are stored for later
2203 retrieval by the arm_find_exidx_entry routine. */
2205 static void
2206 arm_exidx_new_objfile (struct objfile *objfile)
2208 struct cleanup *cleanups;
2209 struct arm_exidx_data *data;
2210 asection *exidx, *extab;
2211 bfd_vma exidx_vma = 0, extab_vma = 0;
2212 bfd_size_type exidx_size = 0, extab_size = 0;
2213 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2214 LONGEST i;
2216 /* If we've already touched this file, do nothing. */
2217 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2218 return;
2219 cleanups = make_cleanup (null_cleanup, NULL);
2221 /* Read contents of exception table and index. */
2222 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2223 if (exidx)
2225 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2226 exidx_size = bfd_get_section_size (exidx);
2227 exidx_data = (gdb_byte *) xmalloc (exidx_size);
2228 make_cleanup (xfree, exidx_data);
2230 if (!bfd_get_section_contents (objfile->obfd, exidx,
2231 exidx_data, 0, exidx_size))
2233 do_cleanups (cleanups);
2234 return;
2238 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2239 if (extab)
2241 extab_vma = bfd_section_vma (objfile->obfd, extab);
2242 extab_size = bfd_get_section_size (extab);
2243 extab_data = (gdb_byte *) xmalloc (extab_size);
2244 make_cleanup (xfree, extab_data);
2246 if (!bfd_get_section_contents (objfile->obfd, extab,
2247 extab_data, 0, extab_size))
2249 do_cleanups (cleanups);
2250 return;
2254 /* Allocate exception table data structure. */
2255 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2256 set_objfile_data (objfile, arm_exidx_data_key, data);
2257 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2258 objfile->obfd->section_count,
2259 VEC(arm_exidx_entry_s) *);
2261 /* Fill in exception table. */
2262 for (i = 0; i < exidx_size / 8; i++)
2264 struct arm_exidx_entry new_exidx_entry;
2265 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2266 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2267 bfd_vma addr = 0, word = 0;
2268 int n_bytes = 0, n_words = 0;
2269 struct obj_section *sec;
2270 gdb_byte *entry = NULL;
2272 /* Extract address of start of function. */
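/* The first word of an index entry is a prel31 (31-bit place-relative)
   offset; sign-extend bit 30 into the top bit before adding the
   entry's own address below.  */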
2273 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2274 idx += exidx_vma + i * 8;
2276 /* Find section containing function and compute section offset. */
2277 sec = arm_obj_section_from_vma (objfile, idx);
2278 if (sec == NULL)
2279 continue;
2280 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2282 /* Determine address of exception table entry. */
2283 if (val == 1)
2285 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2287 else if ((val & 0xff000000) == 0x80000000)
2289 /* Exception table entry embedded in .ARM.exidx
2290 -- must be short form. */
2291 word = val;
2292 n_bytes = 3;
2294 else if (!(val & 0x80000000))
2296 /* Exception table entry in .ARM.extab. */
2297 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2298 addr += exidx_vma + i * 8 + 4;
2300 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2302 word = bfd_h_get_32 (objfile->obfd,
2303 extab_data + addr - extab_vma);
2304 addr += 4;
2306 if ((word & 0xff000000) == 0x80000000)
2308 /* Short form. */
2309 n_bytes = 3;
2311 else if ((word & 0xff000000) == 0x81000000
2312 || (word & 0xff000000) == 0x82000000)
2314 /* Long form. */
2315 n_bytes = 2;
2316 n_words = ((word >> 16) & 0xff);
2318 else if (!(word & 0x80000000))
2320 bfd_vma pers;
2321 struct obj_section *pers_sec;
2322 int gnu_personality = 0;
2324 /* Custom personality routine. */
2325 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2326 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2328 /* Check whether we've got one of the variants of the
2329 GNU personality routines. */
2330 pers_sec = arm_obj_section_from_vma (objfile, pers);
2331 if (pers_sec)
2333 static const char *personality[] =
2335 "__gcc_personality_v0",
2336 "__gxx_personality_v0",
2337 "__gcj_personality_v0",
2338 "__gnu_objc_personality_v0",
2339 NULL
2342 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2343 int k;
2345 for (k = 0; personality[k]; k++)
2346 if (lookup_minimal_symbol_by_pc_name
2347 (pc, personality[k], objfile))
2349 gnu_personality = 1;
2350 break;
2354 /* If so, the next word contains a word count in the high
2355 byte, followed by the same unwind instructions as the
2356 pre-defined forms. */
2357 if (gnu_personality
2358 && addr + 4 <= extab_vma + extab_size)
2360 word = bfd_h_get_32 (objfile->obfd,
2361 extab_data + addr - extab_vma);
2362 addr += 4;
2363 n_bytes = 3;
2364 n_words = ((word >> 24) & 0xff);
2370 /* Sanity check address. */
2371 if (n_words)
2372 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2373 n_words = n_bytes = 0;
2375 /* The unwind instructions reside in WORD (only the N_BYTES least
2376 significant bytes are valid), followed by N_WORDS words in the
2377 extab section starting at ADDR. */
2378 if (n_bytes || n_words)
2380 gdb_byte *p = entry
2381 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2382 n_bytes + n_words * 4 + 1);
2384 while (n_bytes--)
2385 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2387 while (n_words--)
2389 word = bfd_h_get_32 (objfile->obfd,
2390 extab_data + addr - extab_vma);
2391 addr += 4;
2393 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2394 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2395 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2396 *p++ = (gdb_byte) (word & 0xff);
2399 /* Implied "Finish" to terminate the list. */
2400 *p++ = 0xb0;
2403 /* Push entry onto vector. They are guaranteed to always
2404 appear in order of increasing addresses. */
2405 new_exidx_entry.addr = idx;
2406 new_exidx_entry.entry = entry;
2407 VEC_safe_push (arm_exidx_entry_s,
2408 data->section_maps[sec->the_bfd_section->index],
2409 &new_exidx_entry);
2412 do_cleanups (cleanups);
2415 /* Search for the exception table entry covering MEMADDR. If one is found,
2416 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2417 set *START to the start of the region covered by this entry. */
2419 static gdb_byte *
2420 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2422 struct obj_section *sec;
2424 sec = find_pc_section (memaddr);
2425 if (sec != NULL)
2427 struct arm_exidx_data *data;
2428 VEC(arm_exidx_entry_s) *map;
2429 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2430 unsigned int idx;
2432 data = ((struct arm_exidx_data *)
2433 objfile_data (sec->objfile, arm_exidx_data_key));
2434 if (data != NULL)
2436 map = data->section_maps[sec->the_bfd_section->index];
2437 if (!VEC_empty (arm_exidx_entry_s, map))
2439 struct arm_exidx_entry *map_sym;
2441 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2442 arm_compare_exidx_entries);
2444 /* VEC_lower_bound finds the earliest ordered insertion
2445 point. If the following symbol starts at this exact
2446 address, we use that; otherwise, the preceding
2447 exception table entry covers this address. */
2448 if (idx < VEC_length (arm_exidx_entry_s, map))
2450 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2451 if (map_sym->addr == map_key.addr)
2453 if (start)
2454 *start = map_sym->addr + obj_section_addr (sec);
2455 return map_sym->entry;
2459 if (idx > 0)
2461 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2462 if (start)
2463 *start = map_sym->addr + obj_section_addr (sec);
2464 return map_sym->entry;
2470 return NULL;
2473 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2474 instruction list from the ARM exception table entry ENTRY, allocate and
2475 return a prologue cache structure describing how to unwind this frame.
2477 Return NULL if the unwinding instruction list contains a "spare",
2478 "reserved" or "refuse to unwind" instruction as defined in section
2479 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2480 for the ARM Architecture" document. */
2482 static struct arm_prologue_cache *
2483 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2485 CORE_ADDR vsp = 0;
2486 int vsp_valid = 0;
2488 struct arm_prologue_cache *cache;
2489 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2490 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2492 for (;;)
2494 gdb_byte insn;
2496 /* Whenever we reload SP, we actually have to retrieve its
2497 actual value in the current frame. */
2498 if (!vsp_valid)
2500 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2502 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2503 vsp = get_frame_register_unsigned (this_frame, reg);
2505 else
2507 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2508 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2511 vsp_valid = 1;
2514 /* Decode next unwind instruction. */
2515 insn = *entry++;
2517 if ((insn & 0xc0) == 0)
2519 int offset = insn & 0x3f;
2520 vsp += (offset << 2) + 4;
2522 else if ((insn & 0xc0) == 0x40)
2524 int offset = insn & 0x3f;
2525 vsp -= (offset << 2) + 4;
2527 else if ((insn & 0xf0) == 0x80)
2529 int mask = ((insn & 0xf) << 8) | *entry++;
2530 int i;
2532 /* The special case of an all-zero mask identifies
2533 "Refuse to unwind". We return NULL to fall back
2534 to the prologue analyzer. */
2535 if (mask == 0)
2536 return NULL;
2538 /* Pop registers r4..r15 under mask. */
2539 for (i = 0; i < 12; i++)
2540 if (mask & (1 << i))
2542 cache->saved_regs[4 + i].addr = vsp;
2543 vsp += 4;
2546 /* Special-case popping SP -- we need to reload vsp. */
2547 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2548 vsp_valid = 0;
2550 else if ((insn & 0xf0) == 0x90)
2552 int reg = insn & 0xf;
2554 /* Reserved cases. */
2555 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2556 return NULL;
2558 /* Set SP from another register and mark VSP for reload. */
2559 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2560 vsp_valid = 0;
2562 else if ((insn & 0xf0) == 0xa0)
2564 int count = insn & 0x7;
2565 int pop_lr = (insn & 0x8) != 0;
2566 int i;
2568 /* Pop r4..r[4+count]. */
2569 for (i = 0; i <= count; i++)
2571 cache->saved_regs[4 + i].addr = vsp;
2572 vsp += 4;
2575 /* If indicated by flag, pop LR as well. */
2576 if (pop_lr)
2578 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2579 vsp += 4;
2582 else if (insn == 0xb0)
2584 /* We could only have updated PC by popping into it; if so, it
2585 will show up as an address. Otherwise, copy LR into PC. */
2586 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2587 cache->saved_regs[ARM_PC_REGNUM]
2588 = cache->saved_regs[ARM_LR_REGNUM];
2590 /* We're done. */
2591 break;
2593 else if (insn == 0xb1)
2595 int mask = *entry++;
2596 int i;
2598 /* All-zero mask and mask >= 16 is "spare". */
2599 if (mask == 0 || mask >= 16)
2600 return NULL;
2602 /* Pop r0..r3 under mask. */
2603 for (i = 0; i < 4; i++)
2604 if (mask & (1 << i))
2606 cache->saved_regs[i].addr = vsp;
2607 vsp += 4;
2610 else if (insn == 0xb2)
2612 ULONGEST offset = 0;
2613 unsigned shift = 0;
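/* 0xb2: vsp = vsp + 0x204 + (uleb128 << 2).  The loop below decodes
   the ULEB128 operand, least-significant group first, with bit 7 of
   each byte acting as the continuation flag.  */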
2617 offset |= (*entry & 0x7f) << shift;
2618 shift += 7;
2620 while (*entry++ & 0x80);
2622 vsp += 0x204 + (offset << 2);
2624 else if (insn == 0xb3)
2626 int start = *entry >> 4;
2627 int count = (*entry++) & 0xf;
2628 int i;
2630 /* Only registers D0..D15 are valid here. */
2631 if (start + count >= 16)
2632 return NULL;
2634 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2635 for (i = 0; i <= count; i++)
2637 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2638 vsp += 8;
2641 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2642 vsp += 4;
2644 else if ((insn & 0xf8) == 0xb8)
2646 int count = insn & 0x7;
2647 int i;
2649 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2650 for (i = 0; i <= count; i++)
2652 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2653 vsp += 8;
2656 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2657 vsp += 4;
2659 else if (insn == 0xc6)
2661 int start = *entry >> 4;
2662 int count = (*entry++) & 0xf;
2663 int i;
2665 /* Only registers WR0..WR15 are valid. */
2666 if (start + count >= 16)
2667 return NULL;
2669 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2670 for (i = 0; i <= count; i++)
2672 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2673 vsp += 8;
2676 else if (insn == 0xc7)
2678 int mask = *entry++;
2679 int i;
2681 /* All-zero mask and mask >= 16 is "spare". */
2682 if (mask == 0 || mask >= 16)
2683 return NULL;
2685 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2686 for (i = 0; i < 4; i++)
2687 if (mask & (1 << i))
2689 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2690 vsp += 4;
2693 else if ((insn & 0xf8) == 0xc0)
2695 int count = insn & 0x7;
2696 int i;
2698 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2699 for (i = 0; i <= count; i++)
2701 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2702 vsp += 8;
2705 else if (insn == 0xc8)
2707 int start = *entry >> 4;
2708 int count = (*entry++) & 0xf;
2709 int i;
2711 /* Only registers D0..D31 are valid. */
2712 if (start + count >= 16)
2713 return NULL;
2715 /* Pop VFP double-precision registers
2716 D[16+start]..D[16+start+count]. */
2717 for (i = 0; i <= count; i++)
2719 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2720 vsp += 8;
2723 else if (insn == 0xc9)
2725 int start = *entry >> 4;
2726 int count = (*entry++) & 0xf;
2727 int i;
2729 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2730 for (i = 0; i <= count; i++)
2732 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2733 vsp += 8;
2736 else if ((insn & 0xf8) == 0xd0)
2738 int count = insn & 0x7;
2739 int i;
2741 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2742 for (i = 0; i <= count; i++)
2744 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2745 vsp += 8;
2748 else
2750 /* Everything else is "spare". */
2751 return NULL;
2755 /* If we restore SP from a register, assume this was the frame register.
2756 Otherwise just fall back to SP as frame register. */
2757 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2758 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2759 else
2760 cache->framereg = ARM_SP_REGNUM;
2762 /* Determine offset to previous frame. */
2763 cache->framesize
2764 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2766 /* We already got the previous SP. */
2767 cache->prev_sp = vsp;
2769 return cache;
2772 /* Unwinding via ARM exception table entries. Note that the sniffer
2773 already computes a filled-in prologue cache, which is then used
2774 with the same arm_prologue_this_id and arm_prologue_prev_register
2775 routines also used for prologue-parsing based unwinding. */
2777 static int
2778 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2779 struct frame_info *this_frame,
2780 void **this_prologue_cache)
2782 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2783 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2784 CORE_ADDR addr_in_block, exidx_region, func_start;
2785 struct arm_prologue_cache *cache;
2786 gdb_byte *entry;
2788 /* See if we have an ARM exception table entry covering this address. */
2789 addr_in_block = get_frame_address_in_block (this_frame);
2790 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2791 if (!entry)
2792 return 0;
2794 /* The ARM exception table does not describe unwind information
2795 for arbitrary PC values; it is only guaranteed to be correct
2796 at call sites. We have to decide here whether we want to use
2797 ARM exception table information for this frame, or fall back
2798 to using prologue parsing. (Note that if we have DWARF CFI,
2799 this sniffer isn't even called -- CFI is always preferred.)
2801 Before we make this decision, however, we check whether we
2802 actually have *symbol* information for the current frame.
2803 If not, prologue parsing would not work anyway, so we might
2804 as well use the exception table and hope for the best. */
2805 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2807 int exc_valid = 0;
2809 /* If the next frame is "normal", we are at a call site in this
2810 frame, so exception information is guaranteed to be valid. */
2811 if (get_next_frame (this_frame)
2812 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2813 exc_valid = 1;
2815 /* We also assume exception information is valid if we're currently
2816 blocked in a system call. The system library is supposed to
2817 ensure this, so that e.g. pthread cancellation works.
2819 But before verifying the instruction at the point of call, make
2820 sure this_frame is actually making a call (or, said differently,
2821 that it is not the innermost frame). For that, we compare
2822 this_frame's PC vs this_frame's addr_in_block. If equal, it means
2823 there is no call (otherwise, the PC would be the return address,
2824 which is the instruction after the call). */
2826 if (get_frame_pc (this_frame) != addr_in_block)
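/* This frame's PC is a return address, so the instruction that made
   the call is at PC - 2 in Thumb mode or PC - 4 in ARM mode; check
   whether that instruction is an SVC.  */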
2828 if (arm_frame_is_thumb (this_frame))
2830 LONGEST insn;
2832 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2833 byte_order_for_code, &insn)
2834 && (insn & 0xff00) == 0xdf00 /* svc */)
2835 exc_valid = 1;
2837 else
2839 LONGEST insn;
2841 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2842 byte_order_for_code, &insn)
2843 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2844 exc_valid = 1;
2848 /* Bail out if we don't know that exception information is valid. */
2849 if (!exc_valid)
2850 return 0;
2852 /* The ARM exception index does not mark the *end* of the region
2853 covered by the entry, and some functions will not have any entry.
2854 To correctly recognize the end of the covered region, the linker
2855 should have inserted dummy records with a CANTUNWIND marker.
2857 Unfortunately, current versions of GNU ld do not reliably do
2858 this, and thus we may have found an incorrect entry above.
2859 As a (temporary) sanity check, we only use the entry if it
2860 lies *within* the bounds of the function. Note that this check
2861 might reject perfectly valid entries that just happen to cover
2862 multiple functions; therefore this check ought to be removed
2863 once the linker is fixed. */
2864 if (func_start > exidx_region)
2865 return 0;
2868 /* Decode the list of unwinding instructions into a prologue cache.
2869 Note that this may fail due to e.g. a "refuse to unwind" code. */
2870 cache = arm_exidx_fill_cache (this_frame, entry);
2871 if (!cache)
2872 return 0;
2874 *this_prologue_cache = cache;
2875 return 1;
2878 struct frame_unwind arm_exidx_unwind = {
2879 NORMAL_FRAME,
2880 default_frame_unwind_stop_reason,
2881 arm_prologue_this_id,
2882 arm_prologue_prev_register,
2883 NULL,
2884 arm_exidx_unwind_sniffer
2887 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2888 trampoline, return the target PC. Otherwise return 0.
2890 void call0a (char c, short s, int i, long l) {}
2892 int main (void)
2894 (*pointer_to_call0a) (c, s, i, l);
2897 Instead of calling a stub library function _call_via_xx (xx is
2898 the register name), GCC may inline the trampoline in the object
2899 file as below (register r2 has the address of call0a).
2901 .global main
2902 .type main, %function
2904 bl .L1
2906 .size main, .-main
2908 .L1:
2909 bx r2
2911 The trampoline 'bx r2' doesn't belong to main. */
2913 static CORE_ADDR
2914 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2916 /* The heuristic for recognizing such a trampoline is that FRAME is
2917 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2918 if (arm_frame_is_thumb (frame))
2920 gdb_byte buf[2];
2922 if (target_read_memory (pc, buf, 2) == 0)
2924 struct gdbarch *gdbarch = get_frame_arch (frame);
2925 enum bfd_endian byte_order_for_code
2926 = gdbarch_byte_order_for_code (gdbarch);
2927 uint16_t insn
2928 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2930 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2932 CORE_ADDR dest
2933 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2935 /* Clear the LSB so that gdb core sets step-resume
2936 breakpoint at the right address. */
2937 return UNMAKE_THUMB_ADDR (dest);
2942 return 0;
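/* Allocate a prologue cache for a stub frame (e.g. a PLT entry or
   unreadable code); the previous frame's SP is simply this frame's
   current SP.  */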
2945 static struct arm_prologue_cache *
2946 arm_make_stub_cache (struct frame_info *this_frame)
2948 struct arm_prologue_cache *cache;
2950 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2951 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2953 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2955 return cache;
2958 /* Our frame ID for a stub frame is the current SP and LR. */
2960 static void
2961 arm_stub_this_id (struct frame_info *this_frame,
2962 void **this_cache,
2963 struct frame_id *this_id)
2965 struct arm_prologue_cache *cache;
2967 if (*this_cache == NULL)
2968 *this_cache = arm_make_stub_cache (this_frame);
2969 cache = (struct arm_prologue_cache *) *this_cache;
2971 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2974 static int
2975 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2976 struct frame_info *this_frame,
2977 void **this_prologue_cache)
2979 CORE_ADDR addr_in_block;
2980 gdb_byte dummy[4];
2981 CORE_ADDR pc, start_addr;
2982 const char *name;
2984 addr_in_block = get_frame_address_in_block (this_frame);
2985 pc = get_frame_pc (this_frame);
2986 if (in_plt_section (addr_in_block)
2987 /* We also use the stub unwinder if the target memory is unreadable,
2988 to avoid having the prologue unwinder try to read it. */
2989 || target_read_memory (pc, dummy, 4) != 0)
2990 return 1;
2992 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2993 && arm_skip_bx_reg (this_frame, pc) != 0)
2994 return 1;
2996 return 0;
2999 struct frame_unwind arm_stub_unwind = {
3000 NORMAL_FRAME,
3001 default_frame_unwind_stop_reason,
3002 arm_stub_this_id,
3003 arm_prologue_prev_register,
3004 NULL,
3005 arm_stub_unwind_sniffer
3008 /* Allocate and return a prologue cache whose saved_regs describe where
3009 the M-profile exception entry sequence saved the registers of the
3010 frame described by THIS_FRAME. */
3012 static struct arm_prologue_cache *
3013 arm_m_exception_cache (struct frame_info *this_frame)
3015 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3016 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3017 struct arm_prologue_cache *cache;
3018 CORE_ADDR unwound_sp;
3019 LONGEST xpsr;
3021 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3022 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3024 unwound_sp = get_frame_register_unsigned (this_frame,
3025 ARM_SP_REGNUM);
3027 /* The hardware saves eight 32-bit words, comprising xPSR,
3028 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3029 "B1.5.6 Exception entry behavior" in
3030 "ARMv7-M Architecture Reference Manual". */
3031 cache->saved_regs[0].addr = unwound_sp;
3032 cache->saved_regs[1].addr = unwound_sp + 4;
3033 cache->saved_regs[2].addr = unwound_sp + 8;
3034 cache->saved_regs[3].addr = unwound_sp + 12;
3035 cache->saved_regs[12].addr = unwound_sp + 16;
3036 cache->saved_regs[14].addr = unwound_sp + 20;
3037 cache->saved_regs[15].addr = unwound_sp + 24;
3038 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3040 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3041 aligner between the top of the 32-byte stack frame and the
3042 previous context's stack pointer. */
3043 cache->prev_sp = unwound_sp + 32;
3044 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3045 && (xpsr & (1 << 9)) != 0)
3046 cache->prev_sp += 4;
3048 return cache;
3051 /* Implementation of function hook 'this_id' in
3052 'struct frame_unwind'. */
3054 static void
3055 arm_m_exception_this_id (struct frame_info *this_frame,
3056 void **this_cache,
3057 struct frame_id *this_id)
3059 struct arm_prologue_cache *cache;
3061 if (*this_cache == NULL)
3062 *this_cache = arm_m_exception_cache (this_frame);
3063 cache = (struct arm_prologue_cache *) *this_cache;
3065 /* Our frame ID for an exception frame is the previous SP and the frame's PC. */
3066 *this_id = frame_id_build (cache->prev_sp,
3067 get_frame_pc (this_frame));
3070 /* Implementation of function hook 'prev_register' in
3071 'struct frame_unwind'. */
3073 static struct value *
3074 arm_m_exception_prev_register (struct frame_info *this_frame,
3075 void **this_cache,
3076 int prev_regnum)
3078 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3079 struct arm_prologue_cache *cache;
3081 if (*this_cache == NULL)
3082 *this_cache = arm_m_exception_cache (this_frame);
3083 cache = (struct arm_prologue_cache *) *this_cache;
3085 /* The value was already reconstructed into PREV_SP. */
3086 if (prev_regnum == ARM_SP_REGNUM)
3087 return frame_unwind_got_constant (this_frame, prev_regnum,
3088 cache->prev_sp);
3090 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3091 prev_regnum);
3094 /* Implementation of function hook 'sniffer' in
3095 'struct frame_unwind'. */
3097 static int
3098 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3099 struct frame_info *this_frame,
3100 void **this_prologue_cache)
3102 CORE_ADDR this_pc = get_frame_pc (this_frame);
3104 /* No need to check is_m; this sniffer is only registered for
3105 M-profile architectures. */
3107 /* Exception frames return to one of these magic PCs. Other values
3108 are not defined as of v7-M. See details in "B1.5.8 Exception
3109 return behavior" in "ARMv7-M Architecture Reference Manual". */
3110 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3111 || this_pc == 0xfffffffd)
3112 return 1;
3114 return 0;
3117 /* Frame unwinder for M-profile exceptions. */
3119 struct frame_unwind arm_m_exception_unwind =
3121 SIGTRAMP_FRAME,
3122 default_frame_unwind_stop_reason,
3123 arm_m_exception_this_id,
3124 arm_m_exception_prev_register,
3125 NULL,
3126 arm_m_exception_unwind_sniffer
3129 static CORE_ADDR
3130 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3132 struct arm_prologue_cache *cache;
3134 if (*this_cache == NULL)
3135 *this_cache = arm_make_prologue_cache (this_frame);
3136 cache = (struct arm_prologue_cache *) *this_cache;
3138 return cache->prev_sp - cache->framesize;
3141 struct frame_base arm_normal_base = {
3142 &arm_prologue_unwind,
3143 arm_normal_frame_base,
3144 arm_normal_frame_base,
3145 arm_normal_frame_base
3148 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3149 dummy frame. The frame ID's base needs to match the TOS value
3150 saved by save_dummy_frame_tos() and returned from
3151 arm_push_dummy_call, and the PC needs to match the dummy frame's
3152 breakpoint. */
3154 static struct frame_id
3155 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3157 return frame_id_build (get_frame_register_unsigned (this_frame,
3158 ARM_SP_REGNUM),
3159 get_frame_pc (this_frame));
3162 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3163 be used to construct the previous frame's ID, after looking up the
3164 containing function). */
3166 static CORE_ADDR
3167 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3169 CORE_ADDR pc;
3170 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3171 return arm_addr_bits_remove (gdbarch, pc);
3174 static CORE_ADDR
3175 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3177 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3180 static struct value *
3181 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3182 int regnum)
3184 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3185 CORE_ADDR lr, cpsr;
3186 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3188 switch (regnum)
3190 case ARM_PC_REGNUM:
3191 /* The PC is normally copied from the return column, which
3192 describes saves of LR. However, that version may have an
3193 extra bit set to indicate Thumb state. The bit is not
3194 part of the PC. */
3195 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3196 return frame_unwind_got_constant (this_frame, regnum,
3197 arm_addr_bits_remove (gdbarch, lr));
3199 case ARM_PS_REGNUM:
3200 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3201 cpsr = get_frame_register_unsigned (this_frame, regnum);
3202 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3203 if (IS_THUMB_ADDR (lr))
3204 cpsr |= t_bit;
3205 else
3206 cpsr &= ~t_bit;
3207 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3209 default:
3210 internal_error (__FILE__, __LINE__,
3211 _("Unexpected register %d"), regnum);
3215 static void
3216 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3217 struct dwarf2_frame_state_reg *reg,
3218 struct frame_info *this_frame)
3220 switch (regnum)
3222 case ARM_PC_REGNUM:
3223 case ARM_PS_REGNUM:
3224 reg->how = DWARF2_FRAME_REG_FN;
3225 reg->loc.fn = arm_dwarf2_prev_register;
3226 break;
3227 case ARM_SP_REGNUM:
3228 reg->how = DWARF2_FRAME_REG_CFA;
3229 break;
3233 /* Implement the stack_frame_destroyed_p gdbarch method. */
3235 static int
3236 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3238 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3239 unsigned int insn, insn2;
3240 int found_return = 0, found_stack_adjust = 0;
3241 CORE_ADDR func_start, func_end;
3242 CORE_ADDR scan_pc;
3243 gdb_byte buf[4];
3245 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3246 return 0;
3248 /* The epilogue is a sequence of instructions along the following lines:
3250 - add stack frame size to SP or FP
3251 - [if frame pointer used] restore SP from FP
3252 - restore registers from SP [may include PC]
3253 - a return-type instruction [if PC wasn't already restored]
3255 In a first pass, we scan forward from the current PC and verify the
3256 instructions we find as compatible with this sequence, ending in a
3257 return instruction.
3259 However, this is not sufficient to distinguish indirect function calls
3260 within a function from indirect tail calls in the epilogue in some cases.
3261 Therefore, if we didn't already find any SP-changing instruction during
3262 forward scan, we add a backward scanning heuristic to ensure we actually
3263 are in the epilogue. */
3265 scan_pc = pc;
3266 while (scan_pc < func_end && !found_return)
3268 if (target_read_memory (scan_pc, buf, 2))
3269 break;
3271 scan_pc += 2;
3272 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3274 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3275 found_return = 1;
3276 else if (insn == 0x46f7) /* mov pc, lr */
3277 found_return = 1;
3278 else if (thumb_instruction_restores_sp (insn))
3280 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3281 found_return = 1;
3283 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3285 if (target_read_memory (scan_pc, buf, 2))
3286 break;
3288 scan_pc += 2;
3289 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3291 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3293 if (insn2 & 0x8000) /* <registers> include PC. */
3294 found_return = 1;
3296 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3297 && (insn2 & 0x0fff) == 0x0b04)
3299 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3300 found_return = 1;
3302 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3303 && (insn2 & 0x0e00) == 0x0a00)
3305 else
3306 break;
3308 else
3309 break;
3312 if (!found_return)
3313 return 0;
3315 /* Since any instruction in the epilogue sequence, with the possible
3316 exception of return itself, updates the stack pointer, we need to
3317 scan backwards for at most one instruction. Try either a 16-bit or
3318 a 32-bit instruction. This is just a heuristic, so we do not worry
3319 too much about false positives. */
3321 if (pc - 4 < func_start)
3322 return 0;
3323 if (target_read_memory (pc - 4, buf, 4))
3324 return 0;
3326 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3327 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3329 if (thumb_instruction_restores_sp (insn2))
3330 found_stack_adjust = 1;
3331 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3332 found_stack_adjust = 1;
3333 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3334 && (insn2 & 0x0fff) == 0x0b04)
3335 found_stack_adjust = 1;
3336 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3337 && (insn2 & 0x0e00) == 0x0a00)
3338 found_stack_adjust = 1;
3340 return found_stack_adjust;
3343 /* Implement the stack_frame_destroyed_p gdbarch method. */
3345 static int
3346 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3348 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3349 unsigned int insn;
3350 int found_return;
3351 CORE_ADDR func_start, func_end;
3353 if (arm_pc_is_thumb (gdbarch, pc))
3354 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3356 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3357 return 0;
3359 /* We are in the epilogue if the previous instruction was a stack
3360 adjustment and the next instruction is a possible return (bx, mov
3361 pc, or pop). We could have to scan backwards to find the stack
3362 adjustment, or forwards to find the return, but this is a decent
3363 approximation. First scan forwards. */
3365 found_return = 0;
3366 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3367 if (bits (insn, 28, 31) != INST_NV)
3369 if ((insn & 0x0ffffff0) == 0x012fff10)
3370 /* BX. */
3371 found_return = 1;
3372 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3373 /* MOV PC. */
3374 found_return = 1;
3375 else if ((insn & 0x0fff0000) == 0x08bd0000
3376 && (insn & 0x0000c000) != 0)
3377 /* POP (LDMIA), including PC or LR. */
3378 found_return = 1;
3381 if (!found_return)
3382 return 0;
3384 /* Scan backwards. This is just a heuristic, so do not worry about
3385 false positives from mode changes. */
3387 if (pc < func_start + 4)
3388 return 0;
3390 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3391 if (arm_instruction_restores_sp (insn))
3392 return 1;
3394 return 0;
3398 /* When arguments must be pushed onto the stack, they go on in reverse
3399 order. The code below implements a FILO (stack) to do this. */
3401 struct stack_item
3403 int len;
3404 struct stack_item *prev;
3405 gdb_byte *data;
3408 static struct stack_item *
3409 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3411 struct stack_item *si;
3412 si = XNEW (struct stack_item);
3413 si->data = (gdb_byte *) xmalloc (len);
3414 si->len = len;
3415 si->prev = prev;
3416 memcpy (si->data, contents, len);
3417 return si;
3420 static struct stack_item *
3421 pop_stack_item (struct stack_item *si)
3423 struct stack_item *dead = si;
3424 si = si->prev;
3425 xfree (dead->data);
3426 xfree (dead);
3427 return si;
3431 /* Return the alignment (in bytes) of the given type. */
3433 static int
3434 arm_type_align (struct type *t)
3436 int n;
3437 int align;
3438 int falign;
3440 t = check_typedef (t);
3441 switch (TYPE_CODE (t))
3443 default:
3444 /* Should never happen. */
3445 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3446 return 4;
3448 case TYPE_CODE_PTR:
3449 case TYPE_CODE_ENUM:
3450 case TYPE_CODE_INT:
3451 case TYPE_CODE_FLT:
3452 case TYPE_CODE_SET:
3453 case TYPE_CODE_RANGE:
3454 case TYPE_CODE_REF:
3455 case TYPE_CODE_CHAR:
3456 case TYPE_CODE_BOOL:
3457 return TYPE_LENGTH (t);
3459 case TYPE_CODE_ARRAY:
3460 if (TYPE_VECTOR (t))
3462 /* Use the natural alignment for vector types (the same as for the
3463 scalar type), but the maximum alignment is 64 bits. */
3464 if (TYPE_LENGTH (t) > 8)
3465 return 8;
3466 else
3467 return TYPE_LENGTH (t);
3469 else
3470 return arm_type_align (TYPE_TARGET_TYPE (t));
3471 case TYPE_CODE_COMPLEX:
3472 return arm_type_align (TYPE_TARGET_TYPE (t));
3474 case TYPE_CODE_STRUCT:
3475 case TYPE_CODE_UNION:
3476 align = 1;
3477 for (n = 0; n < TYPE_NFIELDS (t); n++)
3479 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3480 if (falign > align)
3481 align = falign;
3483 return align;
3487 /* Possible base types for a candidate for passing and returning in
3488 VFP registers. */
3490 enum arm_vfp_cprc_base_type
3492 VFP_CPRC_UNKNOWN,
3493 VFP_CPRC_SINGLE,
3494 VFP_CPRC_DOUBLE,
3495 VFP_CPRC_VEC64,
3496 VFP_CPRC_VEC128
3499 /* The length of one element of base type B. */
3501 static unsigned
3502 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3504 switch (b)
3506 case VFP_CPRC_SINGLE:
3507 return 4;
3508 case VFP_CPRC_DOUBLE:
3509 return 8;
3510 case VFP_CPRC_VEC64:
3511 return 8;
3512 case VFP_CPRC_VEC128:
3513 return 16;
3514 default:
3515 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3516 (int) b);
3520 /* The character ('s', 'd' or 'q') for the type of VFP register used
3521 for passing base type B. */
3523 static int
3524 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3526 switch (b)
3528 case VFP_CPRC_SINGLE:
3529 return 's';
3530 case VFP_CPRC_DOUBLE:
3531 return 'd';
3532 case VFP_CPRC_VEC64:
3533 return 'd';
3534 case VFP_CPRC_VEC128:
3535 return 'q';
3536 default:
3537 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3538 (int) b);
3542 /* Determine whether T may be part of a candidate for passing and
3543 returning in VFP registers, ignoring the limit on the total number
3544 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3545 classification of the first valid component found; if it is not
3546 VFP_CPRC_UNKNOWN, all components must have the same classification
3547 as *BASE_TYPE. If it is found that T contains a type not permitted
3548 for passing and returning in VFP registers, a type differently
3549 classified from *BASE_TYPE, or two types differently classified
3550 from each other, return -1, otherwise return the total number of
3551 base-type elements found (possibly 0 in an empty structure or
3552 array). Vector types are not currently supported, matching the
3553 generic AAPCS support. */
3555 static int
3556 arm_vfp_cprc_sub_candidate (struct type *t,
3557 enum arm_vfp_cprc_base_type *base_type)
3559 t = check_typedef (t);
3560 switch (TYPE_CODE (t))
3562 case TYPE_CODE_FLT:
3563 switch (TYPE_LENGTH (t))
3565 case 4:
3566 if (*base_type == VFP_CPRC_UNKNOWN)
3567 *base_type = VFP_CPRC_SINGLE;
3568 else if (*base_type != VFP_CPRC_SINGLE)
3569 return -1;
3570 return 1;
3572 case 8:
3573 if (*base_type == VFP_CPRC_UNKNOWN)
3574 *base_type = VFP_CPRC_DOUBLE;
3575 else if (*base_type != VFP_CPRC_DOUBLE)
3576 return -1;
3577 return 1;
3579 default:
3580 return -1;
3582 break;
3584 case TYPE_CODE_COMPLEX:
3585 /* Arguments of complex T where T is one of the types float or
3586 double get treated as if they are implemented as:
3588 struct complexT
3590 T real;
3591 T imag;
3595 switch (TYPE_LENGTH (t))
3597 case 8:
3598 if (*base_type == VFP_CPRC_UNKNOWN)
3599 *base_type = VFP_CPRC_SINGLE;
3600 else if (*base_type != VFP_CPRC_SINGLE)
3601 return -1;
3602 return 2;
3604 case 16:
3605 if (*base_type == VFP_CPRC_UNKNOWN)
3606 *base_type = VFP_CPRC_DOUBLE;
3607 else if (*base_type != VFP_CPRC_DOUBLE)
3608 return -1;
3609 return 2;
3611 default:
3612 return -1;
3614 break;
3616 case TYPE_CODE_ARRAY:
3618 if (TYPE_VECTOR (t))
3620 /* 64-bit and 128-bit containerized vector types are VFP
3621 CPRCs. */
3622 switch (TYPE_LENGTH (t))
3624 case 8:
3625 if (*base_type == VFP_CPRC_UNKNOWN)
3626 *base_type = VFP_CPRC_VEC64;
3627 return 1;
3628 case 16:
3629 if (*base_type == VFP_CPRC_UNKNOWN)
3630 *base_type = VFP_CPRC_VEC128;
3631 return 1;
3632 default:
3633 return -1;
3636 else
3638 int count;
3639 unsigned unitlen;
3641 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3642 base_type);
3643 if (count == -1)
3644 return -1;
3645 if (TYPE_LENGTH (t) == 0)
3647 gdb_assert (count == 0);
3648 return 0;
3650 else if (count == 0)
3651 return -1;
3652 unitlen = arm_vfp_cprc_unit_length (*base_type);
3653 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3654 return TYPE_LENGTH (t) / unitlen;
3657 break;
3659 case TYPE_CODE_STRUCT:
3661 int count = 0;
3662 unsigned unitlen;
3663 int i;
3664 for (i = 0; i < TYPE_NFIELDS (t); i++)
3666 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3667 base_type);
3668 if (sub_count == -1)
3669 return -1;
3670 count += sub_count;
3672 if (TYPE_LENGTH (t) == 0)
3674 gdb_assert (count == 0);
3675 return 0;
3677 else if (count == 0)
3678 return -1;
3679 unitlen = arm_vfp_cprc_unit_length (*base_type);
3680 if (TYPE_LENGTH (t) != unitlen * count)
3681 return -1;
3682 return count;
3685 case TYPE_CODE_UNION:
3687 int count = 0;
3688 unsigned unitlen;
3689 int i;
3690 for (i = 0; i < TYPE_NFIELDS (t); i++)
3692 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3693 base_type);
3694 if (sub_count == -1)
3695 return -1;
3696 count = (count > sub_count ? count : sub_count);
3698 if (TYPE_LENGTH (t) == 0)
3700 gdb_assert (count == 0);
3701 return 0;
3703 else if (count == 0)
3704 return -1;
3705 unitlen = arm_vfp_cprc_unit_length (*base_type);
3706 if (TYPE_LENGTH (t) != unitlen * count)
3707 return -1;
3708 return count;
3711 default:
3712 break;
3715 return -1;
3718 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3719 if passed to or returned from a non-variadic function with the VFP
3720 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3721 *BASE_TYPE to the base type for T and *COUNT to the number of
3722 elements of that base type before returning. */
3724 static int
3725 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3726 int *count)
3728 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3729 int c = arm_vfp_cprc_sub_candidate (t, &b);
3730 if (c <= 0 || c > 4)
3731 return 0;
3732 *base_type = b;
3733 *count = c;
3734 return 1;
3737 /* Return 1 if the VFP ABI should be used for passing arguments to and
3738 returning values from a function of type FUNC_TYPE, 0
3739 otherwise. */
3741 static int
3742 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3744 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3745 /* Variadic functions always use the base ABI. Assume that functions
3746 without debug info are not variadic. */
3747 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3748 return 0;
3749 /* The VFP ABI is only supported as a variant of AAPCS. */
3750 if (tdep->arm_abi != ARM_ABI_AAPCS)
3751 return 0;
3752 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3755 /* We currently only support passing parameters in integer registers, which
3756 conforms with GCC's default model, and VFP argument passing following
3757 the VFP variant of AAPCS. Several other variants exist and
3758 we should probably support some of them based on the selected ABI. */
3760 static CORE_ADDR
3761 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3762 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3763 struct value **args, CORE_ADDR sp, int struct_return,
3764 CORE_ADDR struct_addr)
3766 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3767 int argnum;
3768 int argreg;
3769 int nstack;
3770 struct stack_item *si = NULL;
3771 int use_vfp_abi;
3772 struct type *ftype;
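/* Bitmask of VFP argument registers that are still free, one bit per
   single-precision register S0..S15 (equivalently D0..D7 / Q0..Q3
   under the AAPCS VFP variant).  */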
3773 unsigned vfp_regs_free = (1 << 16) - 1;
3775 /* Determine the type of this function and whether the VFP ABI
3776 applies. */
3777 ftype = check_typedef (value_type (function));
3778 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3779 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3780 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3782 /* Set the return address. For the ARM, the return breakpoint is
3783 always at BP_ADDR. */
3784 if (arm_pc_is_thumb (gdbarch, bp_addr))
3785 bp_addr |= 1;
3786 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3788 /* Walk through the list of args and determine how large a temporary
3789 stack is required. Need to take care here as structs may be
3790 passed on the stack, and we have to push them. */
3791 nstack = 0;
3793 argreg = ARM_A1_REGNUM;
3794 nstack = 0;
3796 /* The struct_return pointer occupies the first parameter
3797 passing register. */
3798 if (struct_return)
3800 if (arm_debug)
3801 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3802 gdbarch_register_name (gdbarch, argreg),
3803 paddress (gdbarch, struct_addr));
3804 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3805 argreg++;
3808 for (argnum = 0; argnum < nargs; argnum++)
3810 int len;
3811 struct type *arg_type;
3812 struct type *target_type;
3813 enum type_code typecode;
3814 const bfd_byte *val;
3815 int align;
3816 enum arm_vfp_cprc_base_type vfp_base_type;
3817 int vfp_base_count;
3818 int may_use_core_reg = 1;
3820 arg_type = check_typedef (value_type (args[argnum]));
3821 len = TYPE_LENGTH (arg_type);
3822 target_type = TYPE_TARGET_TYPE (arg_type);
3823 typecode = TYPE_CODE (arg_type);
3824 val = value_contents (args[argnum]);
3826 align = arm_type_align (arg_type);
3827 /* Round alignment up to a whole number of words. */
3828 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3829 /* Different ABIs have different maximum alignments. */
3830 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3832 /* The APCS ABI only requires word alignment. */
3833 align = INT_REGISTER_SIZE;
3835 else
3837 /* The AAPCS requires at most doubleword alignment. */
3838 if (align > INT_REGISTER_SIZE * 2)
3839 align = INT_REGISTER_SIZE * 2;
3842 if (use_vfp_abi
3843 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3844 &vfp_base_count))
3846 int regno;
3847 int unit_length;
3848 int shift;
3849 unsigned mask;
3851 /* Because this is a CPRC it cannot go in a core register or
3852 cause a core register to be skipped for alignment.
3853 Either it goes in VFP registers and the rest of this loop
3854 iteration is skipped for this argument, or it goes on the
3855 stack (and the stack alignment code is correct for this
3856 case). */
3857 may_use_core_reg = 0;
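/* Find the lowest suitably aligned run of free VFP argument registers
   for this candidate: SHIFT is the element size in S-register units
   and MASK covers all of its elements.  */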
3859 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3860 shift = unit_length / 4;
3861 mask = (1 << (shift * vfp_base_count)) - 1;
3862 for (regno = 0; regno < 16; regno += shift)
3863 if (((vfp_regs_free >> regno) & mask) == mask)
3864 break;
3866 if (regno < 16)
3868 int reg_char;
3869 int reg_scaled;
3870 int i;
3872 vfp_regs_free &= ~(mask << regno);
3873 reg_scaled = regno / shift;
3874 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3875 for (i = 0; i < vfp_base_count; i++)
3877 char name_buf[4];
3878 int regnum;
3879 if (reg_char == 'q')
3880 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3881 val + i * unit_length);
3882 else
3884 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3885 reg_char, reg_scaled + i);
3886 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3887 strlen (name_buf));
3888 regcache_cooked_write (regcache, regnum,
3889 val + i * unit_length);
3892 continue;
3894 else
3896 /* This CPRC could not go in VFP registers, so all VFP
3897 registers are now marked as used. */
3898 vfp_regs_free = 0;
3902 /* Push stack padding for doubleword alignment. */
3903 if (nstack & (align - 1))
3905 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3906 nstack += INT_REGISTER_SIZE;
3909 /* Doubleword aligned quantities must go in even register pairs. */
3910 if (may_use_core_reg
3911 && argreg <= ARM_LAST_ARG_REGNUM
3912 && align > INT_REGISTER_SIZE
3913 && argreg & 1)
3914 argreg++;
3916 /* If the argument is a pointer to a function, and it is a
3917 Thumb function, create a LOCAL copy of the value and set
3918 the THUMB bit in it. */
3919 if (TYPE_CODE_PTR == typecode
3920 && target_type != NULL
3921 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3923 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3924 if (arm_pc_is_thumb (gdbarch, regval))
3926 bfd_byte *copy = (bfd_byte *) alloca (len);
3927 store_unsigned_integer (copy, len, byte_order,
3928 MAKE_THUMB_ADDR (regval));
3929 val = copy;
3933 /* Copy the argument to general registers or the stack in
3934 register-sized pieces. Large arguments are split between
3935 registers and stack. */
3936 while (len > 0)
3938 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3939 CORE_ADDR regval
3940 = extract_unsigned_integer (val, partial_len, byte_order);
3942 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3944 /* The argument is being passed in a general purpose
3945 register. */
3946 if (byte_order == BFD_ENDIAN_BIG)
3947 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3948 if (arm_debug)
3949 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3950 argnum,
3951 gdbarch_register_name
3952 (gdbarch, argreg),
3953 phex (regval, INT_REGISTER_SIZE));
3954 regcache_cooked_write_unsigned (regcache, argreg, regval);
3955 argreg++;
3957 else
3959 gdb_byte buf[INT_REGISTER_SIZE];
3961 memset (buf, 0, sizeof (buf));
3962 store_unsigned_integer (buf, partial_len, byte_order, regval);
3964 /* Push the arguments onto the stack. */
3965 if (arm_debug)
3966 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3967 argnum, nstack);
3968 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3969 nstack += INT_REGISTER_SIZE;
3972 len -= partial_len;
3973 val += partial_len;
3976 /* If we have an odd number of words to push, then decrement the stack
3977 by one word now, so the first stack argument will be dword aligned. */
3978 if (nstack & 4)
3979 sp -= 4;
3981 while (si)
3983 sp -= si->len;
3984 write_memory (sp, si->data, si->len);
3985 si = pop_stack_item (si);
3988 /* Finally, update the SP register. */
3989 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3991 return sp;
3995 /* Always align the frame to an 8-byte boundary. This is required on
3996 some platforms and harmless on the rest. */
3998 static CORE_ADDR
3999 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
4001 /* Align the stack to eight bytes. */
4002 return sp & ~ (CORE_ADDR) 7;
4005 static void
4006 print_fpu_flags (struct ui_file *file, int flags)
4008 if (flags & (1 << 0))
4009 fputs_filtered ("IVO ", file);
4010 if (flags & (1 << 1))
4011 fputs_filtered ("DVZ ", file);
4012 if (flags & (1 << 2))
4013 fputs_filtered ("OFL ", file);
4014 if (flags & (1 << 3))
4015 fputs_filtered ("UFL ", file);
4016 if (flags & (1 << 4))
4017 fputs_filtered ("INX ", file);
4018 fputc_filtered ('\n', file);
4021 /* Print interesting information about the floating point processor
4022 (if present) or emulator. */
4023 static void
4024 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4025 struct frame_info *frame, const char *args)
4027 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4028 int type;
4030 type = (status >> 24) & 127;
4031 if (status & (1 << 31))
4032 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
4033 else
4034 fprintf_filtered (file, _("Software FPU type %d\n"), type);
4035 /* i18n: [floating point unit] mask */
4036 fputs_filtered (_("mask: "), file);
4037 print_fpu_flags (file, status >> 16);
4038 /* i18n: [floating point unit] flags */
4039 fputs_filtered (_("flags: "), file);
4040 print_fpu_flags (file, status);
4043 /* Construct the ARM extended floating point type. */
4044 static struct type *
4045 arm_ext_type (struct gdbarch *gdbarch)
4047 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4049 if (!tdep->arm_ext_type)
4050 tdep->arm_ext_type
4051 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4052 floatformats_arm_ext);
4054 return tdep->arm_ext_type;
4057 static struct type *
4058 arm_neon_double_type (struct gdbarch *gdbarch)
4060 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4062 if (tdep->neon_double_type == NULL)
4064 struct type *t, *elem;
4066 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4067 TYPE_CODE_UNION);
4068 elem = builtin_type (gdbarch)->builtin_uint8;
4069 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4070 elem = builtin_type (gdbarch)->builtin_uint16;
4071 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4072 elem = builtin_type (gdbarch)->builtin_uint32;
4073 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4074 elem = builtin_type (gdbarch)->builtin_uint64;
4075 append_composite_type_field (t, "u64", elem);
4076 elem = builtin_type (gdbarch)->builtin_float;
4077 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4078 elem = builtin_type (gdbarch)->builtin_double;
4079 append_composite_type_field (t, "f64", elem);
4081 TYPE_VECTOR (t) = 1;
4082 TYPE_NAME (t) = "neon_d";
4083 tdep->neon_double_type = t;
4086 return tdep->neon_double_type;
4089 /* FIXME: The vector types are not correctly ordered on big-endian
4090 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4091 bits of d0 - regardless of what unit size is being held in d0. So
4092 the offset of the first uint8 in d0 is 7, but the offset of the
4093 first float is 4. This code works as-is for little-endian
4094 targets. */
4096 static struct type *
4097 arm_neon_quad_type (struct gdbarch *gdbarch)
4099 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4101 if (tdep->neon_quad_type == NULL)
4103 struct type *t, *elem;
4105 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4106 TYPE_CODE_UNION);
4107 elem = builtin_type (gdbarch)->builtin_uint8;
4108 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4109 elem = builtin_type (gdbarch)->builtin_uint16;
4110 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4111 elem = builtin_type (gdbarch)->builtin_uint32;
4112 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4113 elem = builtin_type (gdbarch)->builtin_uint64;
4114 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4115 elem = builtin_type (gdbarch)->builtin_float;
4116 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4117 elem = builtin_type (gdbarch)->builtin_double;
4118 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4120 TYPE_VECTOR (t) = 1;
4121 TYPE_NAME (t) = "neon_q";
4122 tdep->neon_quad_type = t;
4125 return tdep->neon_quad_type;
4128 /* Return the GDB type object for the "standard" data type of data in
4129 register N. */
4131 static struct type *
4132 arm_register_type (struct gdbarch *gdbarch, int regnum)
4134 int num_regs = gdbarch_num_regs (gdbarch);
4136 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4137 && regnum >= num_regs && regnum < num_regs + 32)
4138 return builtin_type (gdbarch)->builtin_float;
4140 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4141 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4142 return arm_neon_quad_type (gdbarch);
4144 /* If the target description has register information, we are only
4145 in this function so that we can override the types of
4146 double-precision registers for NEON. */
4147 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4149 struct type *t = tdesc_register_type (gdbarch, regnum);
4151 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4152 && TYPE_CODE (t) == TYPE_CODE_FLT
4153 && gdbarch_tdep (gdbarch)->have_neon)
4154 return arm_neon_double_type (gdbarch);
4155 else
4156 return t;
4159 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4161 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4162 return builtin_type (gdbarch)->builtin_void;
4164 return arm_ext_type (gdbarch);
4166 else if (regnum == ARM_SP_REGNUM)
4167 return builtin_type (gdbarch)->builtin_data_ptr;
4168 else if (regnum == ARM_PC_REGNUM)
4169 return builtin_type (gdbarch)->builtin_func_ptr;
4170 else if (regnum >= ARRAY_SIZE (arm_register_names))
4171 /* These registers are only supported on targets which supply
4172 an XML description. */
4173 return builtin_type (gdbarch)->builtin_int0;
4174 else
4175 return builtin_type (gdbarch)->builtin_uint32;
4178 /* Map a DWARF register REGNUM onto the appropriate GDB register
4179 number. */
4181 static int
4182 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4184 /* Core integer regs. */
4185 if (reg >= 0 && reg <= 15)
4186 return reg;
4188 /* Legacy FPA encoding. These were once used in a way which
4189 overlapped with VFP register numbering, so their use is
4190 discouraged, but GDB doesn't support the ARM toolchain
4191 which used them for VFP. */
4192 if (reg >= 16 && reg <= 23)
4193 return ARM_F0_REGNUM + reg - 16;
4195 /* New assignments for the FPA registers. */
4196 if (reg >= 96 && reg <= 103)
4197 return ARM_F0_REGNUM + reg - 96;
4199 /* WMMX register assignments. */
4200 if (reg >= 104 && reg <= 111)
4201 return ARM_WCGR0_REGNUM + reg - 104;
4203 if (reg >= 112 && reg <= 127)
4204 return ARM_WR0_REGNUM + reg - 112;
4206 if (reg >= 192 && reg <= 199)
4207 return ARM_WC0_REGNUM + reg - 192;
4209 /* VFP v2 registers. A double precision value is actually
4210 in d1 rather than s2, but the ABI only defines numbering
4211 for the single precision registers. This will "just work"
4212 in GDB for little endian targets (we'll read eight bytes,
4213 starting in s0 and then progressing to s1), but will be
4214 reversed on big endian targets with VFP. This won't
4215 be a problem for the new Neon quad registers; you're supposed
4216 to use DW_OP_piece for those. */
4217 if (reg >= 64 && reg <= 95)
4219 char name_buf[4];
4221 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4222 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4223 strlen (name_buf));
4226 /* VFP v3 / Neon registers. This range is also used for VFP v2
4227 registers, except that it now describes d0 instead of s0. */
4228 if (reg >= 256 && reg <= 287)
4230 char name_buf[4];
4232 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4233 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4234 strlen (name_buf));
4237 return -1;
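/* For example (illustrative): DWARF register 66 falls in the VFP v2
   range above and resolves by name to "s2", while DWARF register 257
   falls in the VFP v3 / Neon range and resolves to "d1". */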
4240 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4241 static int
4242 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4244 int reg = regnum;
4245 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4247 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4248 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4250 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4251 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4253 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4254 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4256 if (reg < NUM_GREGS)
4257 return SIM_ARM_R0_REGNUM + reg;
4258 reg -= NUM_GREGS;
4260 if (reg < NUM_FREGS)
4261 return SIM_ARM_FP0_REGNUM + reg;
4262 reg -= NUM_FREGS;
4264 if (reg < NUM_SREGS)
4265 return SIM_ARM_FPS_REGNUM + reg;
4266 reg -= NUM_SREGS;
4268 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4271 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4272 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4273 It is thought that this is the floating-point register format on
4274 little-endian systems. */
4276 static void
4277 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4278 void *dbl, int endianess)
4280 DOUBLEST d;
4282 if (endianess == BFD_ENDIAN_BIG)
4283 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4284 else
4285 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4286 ptr, &d);
4287 floatformat_from_doublest (fmt, &d, dbl);
4290 static void
4291 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4292 int endianess)
4294 DOUBLEST d;
4296 floatformat_to_doublest (fmt, ptr, &d);
4297 if (endianess == BFD_ENDIAN_BIG)
4298 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4299 else
4300 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4301 &d, dbl);
4304 static int
4305 condition_true (unsigned long cond, unsigned long status_reg)
4307 if (cond == INST_AL || cond == INST_NV)
4308 return 1;
4310 switch (cond)
4312 case INST_EQ:
4313 return ((status_reg & FLAG_Z) != 0);
4314 case INST_NE:
4315 return ((status_reg & FLAG_Z) == 0);
4316 case INST_CS:
4317 return ((status_reg & FLAG_C) != 0);
4318 case INST_CC:
4319 return ((status_reg & FLAG_C) == 0);
4320 case INST_MI:
4321 return ((status_reg & FLAG_N) != 0);
4322 case INST_PL:
4323 return ((status_reg & FLAG_N) == 0);
4324 case INST_VS:
4325 return ((status_reg & FLAG_V) != 0);
4326 case INST_VC:
4327 return ((status_reg & FLAG_V) == 0);
4328 case INST_HI:
4329 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4330 case INST_LS:
4331 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4332 case INST_GE:
4333 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4334 case INST_LT:
4335 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4336 case INST_GT:
4337 return (((status_reg & FLAG_Z) == 0)
4338 && (((status_reg & FLAG_N) == 0)
4339 == ((status_reg & FLAG_V) == 0)));
4340 case INST_LE:
4341 return (((status_reg & FLAG_Z) != 0)
4342 || (((status_reg & FLAG_N) == 0)
4343 != ((status_reg & FLAG_V) == 0)));
4345 return 1;
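/* For example, with only FLAG_Z set in STATUS_REG, condition_true
   returns 1 for INST_EQ and INST_LS but 0 for INST_NE and INST_HI. */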
4348 static unsigned long
4349 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4350 unsigned long pc_val, unsigned long status_reg)
4352 unsigned long res, shift;
4353 int rm = bits (inst, 0, 3);
4354 unsigned long shifttype = bits (inst, 5, 6);
4356 if (bit (inst, 4))
4358 int rs = bits (inst, 8, 11);
4359 shift = (rs == 15 ? pc_val + 8
4360 : get_frame_register_unsigned (frame, rs)) & 0xFF;
4362 else
4363 shift = bits (inst, 7, 11);
4365 res = (rm == ARM_PC_REGNUM
4366 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4367 : get_frame_register_unsigned (frame, rm));
4369 switch (shifttype)
4371 case 0: /* LSL */
4372 res = shift >= 32 ? 0 : res << shift;
4373 break;
4375 case 1: /* LSR */
4376 res = shift >= 32 ? 0 : res >> shift;
4377 break;
4379 case 2: /* ASR */
4380 if (shift >= 32)
4381 shift = 31;
4382 res = ((res & 0x80000000L)
4383 ? ~((~res) >> shift) : res >> shift);
4384 break;
4386 case 3: /* ROR/RRX */
4387 shift &= 31;
4388 if (shift == 0)
4389 res = (res >> 1) | (carry ? 0x80000000L : 0);
4390 else
4391 res = (res >> shift) | (res << (32 - shift));
4392 break;
4395 return res & 0xffffffff;
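/* For example (illustrative), an operand field encoding "r1, lsl #3"
   (RM == 1, immediate shift amount 3, shift type LSL) makes this
   function return the value of r1 shifted left by three bits. */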
4398 /* Return number of 1-bits in VAL. */
4400 static int
4401 bitcount (unsigned long val)
4403 int nbits;
4404 for (nbits = 0; val != 0; nbits++)
4405 val &= val - 1; /* Delete rightmost 1-bit in val. */
4406 return nbits;
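/* For example, bitcount (0xb5) == 5. This is used below to work out
   how many registers a "pop {rlist, pc}" or LDM instruction transfers,
   and hence where the saved PC sits on the stack. */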
4409 static int
4410 thumb_advance_itstate (unsigned int itstate)
4412 /* Preserve IT[7:5], the first three bits of the condition. Shift
4413 the upcoming condition flags left by one bit. */
4414 itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
4416 /* If we have finished the IT block, clear the state. */
4417 if ((itstate & 0x0f) == 0)
4418 itstate = 0;
4420 return itstate;
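/* Worked example (illustrative): an "ITE EQ" instruction loads ITSTATE
   with 0x0c. The first conditional instruction sees condition 0x0
   (EQ); advancing gives 0x18, so the second sees condition 0x1 (NE);
   advancing once more clears the low four bits and the state is reset
   to zero, ending the block. */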
4423 /* Find the next PC after the current instruction executes. In some
4424 cases we can not statically determine the answer (see the IT state
4425 handling in this function); in that case, a breakpoint may be
4426 inserted in addition to the returned PC, which will be used to set
4427 another breakpoint by our caller. */
4429 static CORE_ADDR
4430 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4432 struct gdbarch *gdbarch = get_frame_arch (frame);
4433 struct address_space *aspace = get_frame_address_space (frame);
4434 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4435 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4436 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
4437 unsigned short inst1;
4438 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
4439 unsigned long offset;
4440 ULONGEST status, itstate;
4442 nextpc = MAKE_THUMB_ADDR (nextpc);
4443 pc_val = MAKE_THUMB_ADDR (pc_val);
4445 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
4447 /* Thumb-2 conditional execution support. There are eight bits in
4448 the CPSR which describe conditional execution state. Once
4449 reconstructed (they're in a funny order), the low five bits
4450 describe the low bit of the condition for each instruction and
4451 how many instructions remain. The high three bits describe the
4452 base condition. One of the low four bits will be set if an IT
4453 block is active. These bits read as zero on earlier
4454 processors. */
4455 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4456 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
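/* For example, if CPSR bits [15:10] hold 0b000011 and bits [26:25] are
   zero, the reconstructed ITSTATE is 0x0c: an "ITE EQ" block with both
   conditional instructions still to execute. */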
4458 /* If-Then handling. On GNU/Linux, where this routine is used, we
4459 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4460 can disable execution of the undefined instruction. So we might
4461 miss the breakpoint if we set it on a skipped conditional
4462 instruction. Because conditional instructions can change the
4463 flags, affecting the execution of further instructions, we may
4464 need to set two breakpoints. */
4466 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4468 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4470 /* An IT instruction. Because this instruction does not
4471 modify the flags, we can accurately predict the next
4472 executed instruction. */
4473 itstate = inst1 & 0x00ff;
4474 pc += thumb_insn_size (inst1);
4476 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4478 inst1 = read_memory_unsigned_integer (pc, 2,
4479 byte_order_for_code);
4480 pc += thumb_insn_size (inst1);
4481 itstate = thumb_advance_itstate (itstate);
4484 return MAKE_THUMB_ADDR (pc);
4486 else if (itstate != 0)
4488 /* We are in a conditional block. Check the condition. */
4489 if (! condition_true (itstate >> 4, status))
4491 /* Advance to the next executed instruction. */
4492 pc += thumb_insn_size (inst1);
4493 itstate = thumb_advance_itstate (itstate);
4495 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4497 inst1 = read_memory_unsigned_integer (pc, 2,
4498 byte_order_for_code);
4499 pc += thumb_insn_size (inst1);
4500 itstate = thumb_advance_itstate (itstate);
4503 return MAKE_THUMB_ADDR (pc);
4505 else if ((itstate & 0x0f) == 0x08)
4507 /* This is the last instruction of the conditional
4508 block, and it is executed. We can handle it normally
4509 because the following instruction is not conditional,
4510 and we must handle it normally because it is
4511 permitted to branch. Fall through. */
4513 else
4515 int cond_negated;
4517 /* There are conditional instructions after this one.
4518 If this instruction modifies the flags, then we can
4519 not predict what the next executed instruction will
4520 be. Fortunately, this instruction is architecturally
4521 forbidden to branch; we know it will fall through.
4522 Start by skipping past it. */
4523 pc += thumb_insn_size (inst1);
4524 itstate = thumb_advance_itstate (itstate);
4526 /* Set a breakpoint on the following instruction. */
4527 gdb_assert ((itstate & 0x0f) != 0);
4528 arm_insert_single_step_breakpoint (gdbarch, aspace,
4529 MAKE_THUMB_ADDR (pc));
4530 cond_negated = (itstate >> 4) & 1;
4532 /* Skip all following instructions with the same
4533 condition. If there is a later instruction in the IT
4534 block with the opposite condition, set the other
4535 breakpoint there. If not, then set a breakpoint on
4536 the instruction after the IT block. */
4539 inst1 = read_memory_unsigned_integer (pc, 2,
4540 byte_order_for_code);
4541 pc += thumb_insn_size (inst1);
4542 itstate = thumb_advance_itstate (itstate);
4544 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4546 return MAKE_THUMB_ADDR (pc);
4550 else if (itstate & 0x0f)
4552 /* We are in a conditional block. Check the condition. */
4553 int cond = itstate >> 4;
4555 if (! condition_true (cond, status))
4556 /* Advance to the next instruction. All the 32-bit
4557 instructions share a common prefix. */
4558 return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
4560 /* Otherwise, handle the instruction normally. */
4563 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
4565 CORE_ADDR sp;
4567 /* Fetch the saved PC from the stack. It's stored above
4568 all of the other registers. */
4569 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
4570 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
4571 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
4573 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
4575 unsigned long cond = bits (inst1, 8, 11);
4576 if (cond == 0x0f) /* 0x0f = SWI */
4578 struct gdbarch_tdep *tdep;
4579 tdep = gdbarch_tdep (gdbarch);
4581 if (tdep->syscall_next_pc != NULL)
4582 nextpc = tdep->syscall_next_pc (frame);
4585 else if (cond != 0x0f && condition_true (cond, status))
4586 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4588 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
4590 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4592 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
4594 unsigned short inst2;
4595 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
4597 /* Default to the next instruction. */
4598 nextpc = pc + 4;
4599 nextpc = MAKE_THUMB_ADDR (nextpc);
4601 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4603 /* Branches and miscellaneous control instructions. */
4605 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4607 /* B, BL, BLX. */
4608 int j1, j2, imm1, imm2;
4610 imm1 = sbits (inst1, 0, 10);
4611 imm2 = bits (inst2, 0, 10);
4612 j1 = bit (inst2, 13);
4613 j2 = bit (inst2, 11);
4615 offset = ((imm1 << 12) + (imm2 << 1));
4616 offset ^= ((!j2) << 22) | ((!j1) << 23);
4618 nextpc = pc_val + offset;
4619 /* For BLX make sure to clear the low bits. */
4620 if (bit (inst2, 12) == 0)
4621 nextpc = nextpc & 0xfffffffc;
4623 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4625 /* SUBS PC, LR, #imm8. */
4626 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4627 nextpc -= inst2 & 0x00ff;
4629 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
4631 /* Conditional branch. */
4632 if (condition_true (bits (inst1, 6, 9), status))
4634 int sign, j1, j2, imm1, imm2;
4636 sign = sbits (inst1, 10, 10);
4637 imm1 = bits (inst1, 0, 5);
4638 imm2 = bits (inst2, 0, 10);
4639 j1 = bit (inst2, 13);
4640 j2 = bit (inst2, 11);
4642 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4643 offset += (imm1 << 12) + (imm2 << 1);
4645 nextpc = pc_val + offset;
4649 else if ((inst1 & 0xfe50) == 0xe810)
4651 /* Load multiple or RFE. */
4652 int rn, offset, load_pc = 1;
4654 rn = bits (inst1, 0, 3);
4655 if (bit (inst1, 7) && !bit (inst1, 8))
4657 /* LDMIA or POP */
4658 if (!bit (inst2, 15))
4659 load_pc = 0;
4660 offset = bitcount (inst2) * 4 - 4;
4662 else if (!bit (inst1, 7) && bit (inst1, 8))
4664 /* LDMDB */
4665 if (!bit (inst2, 15))
4666 load_pc = 0;
4667 offset = -4;
4669 else if (bit (inst1, 7) && bit (inst1, 8))
4671 /* RFEIA */
4672 offset = 0;
4674 else if (!bit (inst1, 7) && !bit (inst1, 8))
4676 /* RFEDB */
4677 offset = -8;
4679 else
4680 load_pc = 0;
4682 if (load_pc)
4684 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4685 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4688 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4690 /* MOV PC or MOVS PC. */
4691 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4692 nextpc = MAKE_THUMB_ADDR (nextpc);
4694 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4696 /* LDR PC. */
4697 CORE_ADDR base;
4698 int rn, load_pc = 1;
4700 rn = bits (inst1, 0, 3);
4701 base = get_frame_register_unsigned (frame, rn);
4702 if (rn == ARM_PC_REGNUM)
4704 base = (base + 4) & ~(CORE_ADDR) 0x3;
4705 if (bit (inst1, 7))
4706 base += bits (inst2, 0, 11);
4707 else
4708 base -= bits (inst2, 0, 11);
4710 else if (bit (inst1, 7))
4711 base += bits (inst2, 0, 11);
4712 else if (bit (inst2, 11))
4714 if (bit (inst2, 10))
4716 if (bit (inst2, 9))
4717 base += bits (inst2, 0, 7);
4718 else
4719 base -= bits (inst2, 0, 7);
4722 else if ((inst2 & 0x0fc0) == 0x0000)
4724 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4725 base += get_frame_register_unsigned (frame, rm) << shift;
4727 else
4728 /* Reserved. */
4729 load_pc = 0;
4731 if (load_pc)
4732 nextpc = get_frame_memory_unsigned (frame, base, 4);
4734 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
4736 /* TBB. */
4737 CORE_ADDR tbl_reg, table, offset, length;
4739 tbl_reg = bits (inst1, 0, 3);
4740 if (tbl_reg == 0x0f)
4741 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4742 else
4743 table = get_frame_register_unsigned (frame, tbl_reg);
4745 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4746 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4747 nextpc = pc_val + length;
4749 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
4751 /* TBH. */
4752 CORE_ADDR tbl_reg, table, offset, length;
4754 tbl_reg = bits (inst1, 0, 3);
4755 if (tbl_reg == 0x0f)
4756 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4757 else
4758 table = get_frame_register_unsigned (frame, tbl_reg);
4760 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4761 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4762 nextpc = pc_val + length;
4765 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
4767 if (bits (inst1, 3, 6) == 0x0f)
4768 nextpc = UNMAKE_THUMB_ADDR (pc_val);
4769 else
4770 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4772 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
4774 if (bits (inst1, 3, 6) == 0x0f)
4775 nextpc = pc_val;
4776 else
4777 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4779 nextpc = MAKE_THUMB_ADDR (nextpc);
4781 else if ((inst1 & 0xf500) == 0xb100)
4783 /* CBNZ or CBZ. */
4784 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4785 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4787 if (bit (inst1, 11) && reg != 0)
4788 nextpc = pc_val + imm;
4789 else if (!bit (inst1, 11) && reg == 0)
4790 nextpc = pc_val + imm;
4792 return nextpc;
4795 /* Get the raw next address. PC is the current program counter, in
4796 FRAME, which is assumed to be executing in ARM mode.
4798 The value returned has the execution state of the next instruction
4799 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4800 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4801 address. */
4803 static CORE_ADDR
4804 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4806 struct gdbarch *gdbarch = get_frame_arch (frame);
4807 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4808 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4809 unsigned long pc_val;
4810 unsigned long this_instr;
4811 unsigned long status;
4812 CORE_ADDR nextpc;
4814 pc_val = (unsigned long) pc;
4815 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4817 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4818 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
4820 if (bits (this_instr, 28, 31) == INST_NV)
4821 switch (bits (this_instr, 24, 27))
4823 case 0xa:
4824 case 0xb:
4826 /* Branch with Link and change to Thumb. */
4827 nextpc = BranchDest (pc, this_instr);
4828 nextpc |= bit (this_instr, 24) << 1;
4829 nextpc = MAKE_THUMB_ADDR (nextpc);
4830 break;
4832 case 0xc:
4833 case 0xd:
4834 case 0xe:
4835 /* Coprocessor register transfer. */
4836 if (bits (this_instr, 12, 15) == 15)
4837 error (_("Invalid update to pc in instruction"));
4838 break;
4840 else if (condition_true (bits (this_instr, 28, 31), status))
4842 switch (bits (this_instr, 24, 27))
4844 case 0x0:
4845 case 0x1: /* data processing */
4846 case 0x2:
4847 case 0x3:
4849 unsigned long operand1, operand2, result = 0;
4850 unsigned long rn;
4851 int c;
4853 if (bits (this_instr, 12, 15) != 15)
4854 break;
4856 if (bits (this_instr, 22, 25) == 0
4857 && bits (this_instr, 4, 7) == 9) /* multiply */
4858 error (_("Invalid update to pc in instruction"));
4860 /* BX <reg>, BLX <reg> */
4861 if (bits (this_instr, 4, 27) == 0x12fff1
4862 || bits (this_instr, 4, 27) == 0x12fff3)
4864 rn = bits (this_instr, 0, 3);
4865 nextpc = ((rn == ARM_PC_REGNUM)
4866 ? (pc_val + 8)
4867 : get_frame_register_unsigned (frame, rn));
4869 return nextpc;
4872 /* Multiply into PC. */
4873 c = (status & FLAG_C) ? 1 : 0;
4874 rn = bits (this_instr, 16, 19);
4875 operand1 = ((rn == ARM_PC_REGNUM)
4876 ? (pc_val + 8)
4877 : get_frame_register_unsigned (frame, rn));
4879 if (bit (this_instr, 25))
4881 unsigned long immval = bits (this_instr, 0, 7);
4882 unsigned long rotate = 2 * bits (this_instr, 8, 11);
4883 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4884 & 0xffffffff;
4886 else /* operand 2 is a shifted register. */
4887 operand2 = shifted_reg_val (frame, this_instr, c,
4888 pc_val, status);
4890 switch (bits (this_instr, 21, 24))
4892 case 0x0: /*and */
4893 result = operand1 & operand2;
4894 break;
4896 case 0x1: /*eor */
4897 result = operand1 ^ operand2;
4898 break;
4900 case 0x2: /*sub */
4901 result = operand1 - operand2;
4902 break;
4904 case 0x3: /*rsb */
4905 result = operand2 - operand1;
4906 break;
4908 case 0x4: /*add */
4909 result = operand1 + operand2;
4910 break;
4912 case 0x5: /*adc */
4913 result = operand1 + operand2 + c;
4914 break;
4916 case 0x6: /*sbc */
4917 result = operand1 - operand2 + c;
4918 break;
4920 case 0x7: /*rsc */
4921 result = operand2 - operand1 + c;
4922 break;
4924 case 0x8:
4925 case 0x9:
4926 case 0xa:
4927 case 0xb: /* tst, teq, cmp, cmn */
4928 result = (unsigned long) nextpc;
4929 break;
4931 case 0xc: /*orr */
4932 result = operand1 | operand2;
4933 break;
4935 case 0xd: /*mov */
4936 /* Always step into a function. */
4937 result = operand2;
4938 break;
4940 case 0xe: /*bic */
4941 result = operand1 & ~operand2;
4942 break;
4944 case 0xf: /*mvn */
4945 result = ~operand2;
4946 break;
4949 /* In 26-bit APCS the bottom two bits of the result are
4950 ignored, and we always end up in ARM state. */
4951 if (!arm_apcs_32)
4952 nextpc = arm_addr_bits_remove (gdbarch, result);
4953 else
4954 nextpc = result;
4956 break;
4959 case 0x4:
4960 case 0x5: /* data transfer */
4961 case 0x6:
4962 case 0x7:
4963 if (bits (this_instr, 25, 27) == 0x3 && bit (this_instr, 4) == 1)
4965 /* Media instructions and architecturally undefined
4966 instructions. */
4967 break;
4970 if (bit (this_instr, 20))
4972 /* load */
4973 if (bits (this_instr, 12, 15) == 15)
4975 /* rd == pc */
4976 unsigned long rn;
4977 unsigned long base;
4979 if (bit (this_instr, 22))
4980 error (_("Invalid update to pc in instruction"));
4982 /* byte write to PC */
4983 rn = bits (this_instr, 16, 19);
4984 base = ((rn == ARM_PC_REGNUM)
4985 ? (pc_val + 8)
4986 : get_frame_register_unsigned (frame, rn));
4988 if (bit (this_instr, 24))
4990 /* pre-indexed */
4991 int c = (status & FLAG_C) ? 1 : 0;
4992 unsigned long offset =
4993 (bit (this_instr, 25)
4994 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
4995 : bits (this_instr, 0, 11));
4997 if (bit (this_instr, 23))
4998 base += offset;
4999 else
5000 base -= offset;
5002 nextpc =
5003 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
5004 4, byte_order);
5007 break;
5009 case 0x8:
5010 case 0x9: /* block transfer */
5011 if (bit (this_instr, 20))
5013 /* LDM */
5014 if (bit (this_instr, 15))
5016 /* loading pc */
5017 int offset = 0;
5018 unsigned long rn_val
5019 = get_frame_register_unsigned (frame,
5020 bits (this_instr, 16, 19));
5022 if (bit (this_instr, 23))
5024 /* up */
5025 unsigned long reglist = bits (this_instr, 0, 14);
5026 offset = bitcount (reglist) * 4;
5027 if (bit (this_instr, 24)) /* pre */
5028 offset += 4;
5030 else if (bit (this_instr, 24))
5031 offset = -4;
5033 nextpc =
5034 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
5035 (rn_val + offset),
5036 4, byte_order);
5039 break;
5041 case 0xb: /* branch & link */
5042 case 0xa: /* branch */
5044 nextpc = BranchDest (pc, this_instr);
5045 break;
5048 case 0xc:
5049 case 0xd:
5050 case 0xe: /* coproc ops */
5051 break;
5052 case 0xf: /* SWI */
5054 struct gdbarch_tdep *tdep;
5055 tdep = gdbarch_tdep (gdbarch);
5057 if (tdep->syscall_next_pc != NULL)
5058 nextpc = tdep->syscall_next_pc (frame);
5061 break;
5063 default:
5064 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
5065 return (pc);
5069 return nextpc;
5072 /* Determine next PC after current instruction executes. Will call either
5073 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5074 loop is detected. */
5076 CORE_ADDR
5077 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5079 CORE_ADDR nextpc;
5081 if (arm_frame_is_thumb (frame))
5082 nextpc = thumb_get_next_pc_raw (frame, pc);
5083 else
5084 nextpc = arm_get_next_pc_raw (frame, pc);
5086 return nextpc;
5089 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5090 of the appropriate mode (as encoded in the PC value), even if this
5091 differs from what would be expected according to the symbol tables. */
5093 void
5094 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5095 struct address_space *aspace,
5096 CORE_ADDR pc)
5098 struct cleanup *old_chain
5099 = make_cleanup_restore_integer (&arm_override_mode);
5101 arm_override_mode = IS_THUMB_ADDR (pc);
5102 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5104 insert_single_step_breakpoint (gdbarch, aspace, pc);
5106 do_cleanups (old_chain);
5109 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5110 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5111 is found, attempt to step through it. A breakpoint is placed at the end of
5112 the sequence. */
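/* For illustration, a typical Thumb-2 sequence emitted for a GCC
   __sync/__atomic primitive looks roughly like:

   retry:
     ldrex   r3, [r0]
     cmp     r3, r1
     bne     out
     strex   r2, r4, [r0]
     cmp     r2, #0
     bne     retry
   out:

   The code below puts one breakpoint just past the strex and, if it
   finds a single conditional branch inside the sequence, a second one
   at that branch's destination. */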
5114 static int
5115 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
5117 struct gdbarch *gdbarch = get_frame_arch (frame);
5118 struct address_space *aspace = get_frame_address_space (frame);
5119 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5120 CORE_ADDR pc = get_frame_pc (frame);
5121 CORE_ADDR breaks[2] = {-1, -1};
5122 CORE_ADDR loc = pc;
5123 unsigned short insn1, insn2;
5124 int insn_count;
5125 int index;
5126 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5127 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5128 ULONGEST status, itstate;
5130 /* We currently do not support atomic sequences within an IT block. */
5131 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
5132 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
5133 if (itstate & 0x0f)
5134 return 0;
5136 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5137 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5138 loc += 2;
5139 if (thumb_insn_size (insn1) != 4)
5140 return 0;
5142 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5143 loc += 2;
5144 if (!((insn1 & 0xfff0) == 0xe850
5145 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
5146 return 0;
5148 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5149 instructions. */
5150 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5152 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5153 loc += 2;
5155 if (thumb_insn_size (insn1) != 4)
5157 /* Assume that there is at most one conditional branch in the
5158 atomic sequence. If a conditional branch is found, put a
5159 breakpoint in its destination address. */
5160 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
5162 if (last_breakpoint > 0)
5163 return 0; /* More than one conditional branch found,
5164 fall back to the standard code. */
5166 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
5167 last_breakpoint++;
5170 /* We do not support atomic sequences that use any *other*
5171 instructions but conditional branches to change the PC.
5172 Fall back to standard code to avoid losing control of
5173 execution. */
5174 else if (thumb_instruction_changes_pc (insn1))
5175 return 0;
5177 else
5179 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5180 loc += 2;
5182 /* Assume that there is at most one conditional branch in the
5183 atomic sequence. If a conditional branch is found, put a
5184 breakpoint in its destination address. */
5185 if ((insn1 & 0xf800) == 0xf000
5186 && (insn2 & 0xd000) == 0x8000
5187 && (insn1 & 0x0380) != 0x0380)
5189 int sign, j1, j2, imm1, imm2;
5190 unsigned int offset;
5192 sign = sbits (insn1, 10, 10);
5193 imm1 = bits (insn1, 0, 5);
5194 imm2 = bits (insn2, 0, 10);
5195 j1 = bit (insn2, 13);
5196 j2 = bit (insn2, 11);
5198 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
5199 offset += (imm1 << 12) + (imm2 << 1);
5201 if (last_breakpoint > 0)
5202 return 0; /* More than one conditional branch found,
5203 fall back to the standard code. */
5205 breaks[1] = loc + offset;
5206 last_breakpoint++;
5209 /* We do not support atomic sequences that use any *other*
5210 instructions but conditional branches to change the PC.
5211 Fall back to standard code to avoid losing control of
5212 execution. */
5213 else if (thumb2_instruction_changes_pc (insn1, insn2))
5214 return 0;
5216 /* If we find a strex{,b,h,d}, we're done. */
5217 if ((insn1 & 0xfff0) == 0xe840
5218 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
5219 break;
5223 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5224 if (insn_count == atomic_sequence_length)
5225 return 0;
5227 /* Insert a breakpoint right after the end of the atomic sequence. */
5228 breaks[0] = loc;
5230 /* Check for duplicated breakpoints. Check also for a breakpoint
5231 placed (branch instruction's destination) anywhere in sequence. */
5232 if (last_breakpoint
5233 && (breaks[1] == breaks[0]
5234 || (breaks[1] >= pc && breaks[1] < loc)))
5235 last_breakpoint = 0;
5237 /* Effectively inserts the breakpoints. */
5238 for (index = 0; index <= last_breakpoint; index++)
5239 arm_insert_single_step_breakpoint (gdbarch, aspace,
5240 MAKE_THUMB_ADDR (breaks[index]));
5242 return 1;
5245 static int
5246 arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5248 struct gdbarch *gdbarch = get_frame_arch (frame);
5249 struct address_space *aspace = get_frame_address_space (frame);
5250 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5251 CORE_ADDR pc = get_frame_pc (frame);
5252 CORE_ADDR breaks[2] = {-1, -1};
5253 CORE_ADDR loc = pc;
5254 unsigned int insn;
5255 int insn_count;
5256 int index;
5257 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5258 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5260 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5261 Note that we do not currently support conditionally executed atomic
5262 instructions. */
5263 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5264 loc += 4;
5265 if ((insn & 0xff9000f0) != 0xe1900090)
5266 return 0;
5268 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5269 instructions. */
5270 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5272 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5273 loc += 4;
5275 /* Assume that there is at most one conditional branch in the atomic
5276 sequence. If a conditional branch is found, put a breakpoint in
5277 its destination address. */
5278 if (bits (insn, 24, 27) == 0xa)
5280 if (last_breakpoint > 0)
5281 return 0; /* More than one conditional branch found, fall back
5282 to the standard single-step code. */
5284 breaks[1] = BranchDest (loc - 4, insn);
5285 last_breakpoint++;
5288 /* We do not support atomic sequences that use any *other* instructions
5289 but conditional branches to change the PC. Fall back to standard
5290 code to avoid losing control of execution. */
5291 else if (arm_instruction_changes_pc (insn))
5292 return 0;
5294 /* If we find a strex{,b,h,d}, we're done. */
5295 if ((insn & 0xff9000f0) == 0xe1800090)
5296 break;
5299 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5300 if (insn_count == atomic_sequence_length)
5301 return 0;
5303 /* Insert a breakpoint right after the end of the atomic sequence. */
5304 breaks[0] = loc;
5306 /* Check for duplicated breakpoints. Check also for a breakpoint
5307 placed (branch instruction's destination) anywhere in sequence. */
5308 if (last_breakpoint
5309 && (breaks[1] == breaks[0]
5310 || (breaks[1] >= pc && breaks[1] < loc)))
5311 last_breakpoint = 0;
5313 /* Effectively inserts the breakpoints. */
5314 for (index = 0; index <= last_breakpoint; index++)
5315 arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
5317 return 1;
5321 arm_deal_with_atomic_sequence (struct frame_info *frame)
5323 if (arm_frame_is_thumb (frame))
5324 return thumb_deal_with_atomic_sequence_raw (frame);
5325 else
5326 return arm_deal_with_atomic_sequence_raw (frame);
5329 /* single_step() is called just before we want to resume the inferior,
5330 if we want to single-step it but there is no hardware or kernel
5331 single-step support. We find the target of the coming instruction
5332 and breakpoint it. */
5335 arm_software_single_step (struct frame_info *frame)
5337 struct gdbarch *gdbarch = get_frame_arch (frame);
5338 struct address_space *aspace = get_frame_address_space (frame);
5339 CORE_ADDR next_pc;
5341 if (arm_deal_with_atomic_sequence (frame))
5342 return 1;
5344 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5345 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5347 return 1;
5350 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5351 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5352 NULL if an error occurs. BUF is freed. */
5354 static gdb_byte *
5355 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5356 int old_len, int new_len)
5358 gdb_byte *new_buf;
5359 int bytes_to_read = new_len - old_len;
5361 new_buf = (gdb_byte *) xmalloc (new_len);
5362 memcpy (new_buf + bytes_to_read, buf, old_len);
5363 xfree (buf);
5364 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5366 xfree (new_buf);
5367 return NULL;
5369 return new_buf;
5372 /* An IT block is at most the 2-byte IT instruction followed by
5373 four 4-byte instructions. The furthest back we must search to
5374 find an IT block that affects the current instruction is thus
5375 2 + 3 * 4 == 14 bytes. */
5376 #define MAX_IT_BLOCK_PREFIX 14
5378 /* Use a quick scan if there are more than this many bytes of
5379 code. */
5380 #define IT_SCAN_THRESHOLD 32
5382 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5383 A breakpoint in an IT block may not be hit, depending on the
5384 condition flags. */
5385 static CORE_ADDR
5386 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5388 gdb_byte *buf;
5389 char map_type;
5390 CORE_ADDR boundary, func_start;
5391 int buf_len;
5392 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5393 int i, any, last_it, last_it_count;
5395 /* If we are using BKPT breakpoints, none of this is necessary. */
5396 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5397 return bpaddr;
5399 /* ARM mode does not have this problem. */
5400 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5401 return bpaddr;
5403 /* We are setting a breakpoint in Thumb code that could potentially
5404 contain an IT block. The first step is to find how much Thumb
5405 code there is; we do not need to read outside of known Thumb
5406 sequences. */
5407 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5408 if (map_type == 0)
5409 /* Thumb-2 code must have mapping symbols to have a chance. */
5410 return bpaddr;
5412 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5414 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5415 && func_start > boundary)
5416 boundary = func_start;
5418 /* Search for a candidate IT instruction. We have to do some fancy
5419 footwork to distinguish a real IT instruction from the second
5420 half of a 32-bit instruction, but there is no need for that if
5421 there's no candidate. */
5422 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5423 if (buf_len == 0)
5424 /* No room for an IT instruction. */
5425 return bpaddr;
5427 buf = (gdb_byte *) xmalloc (buf_len);
5428 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5429 return bpaddr;
5430 any = 0;
5431 for (i = 0; i < buf_len; i += 2)
5433 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5434 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5436 any = 1;
5437 break;
5440 if (any == 0)
5442 xfree (buf);
5443 return bpaddr;
5446 /* OK, the code bytes before this instruction contain at least one
5447 halfword which resembles an IT instruction. We know that it's
5448 Thumb code, but there are still two possibilities. Either the
5449 halfword really is an IT instruction, or it is the second half of
5450 a 32-bit Thumb instruction. The only way we can tell is to
5451 scan forwards from a known instruction boundary. */
5452 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5454 int definite;
5456 /* There's a lot of code before this instruction. Start with an
5457 optimistic search; it's easy to recognize halfwords that can
5458 not be the start of a 32-bit instruction, and use that to
5459 lock on to the instruction boundaries. */
5460 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5461 if (buf == NULL)
5462 return bpaddr;
5463 buf_len = IT_SCAN_THRESHOLD;
5465 definite = 0;
5466 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5468 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5469 if (thumb_insn_size (inst1) == 2)
5471 definite = 1;
5472 break;
5476 /* At this point, if DEFINITE, BUF[I] is the first place we
5477 are sure that we know the instruction boundaries, and it is far
5478 enough from BPADDR that we could not miss an IT instruction
5479 affecting BPADDR. If ! DEFINITE, give up - start from a
5480 known boundary. */
5481 if (! definite)
5483 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5484 bpaddr - boundary);
5485 if (buf == NULL)
5486 return bpaddr;
5487 buf_len = bpaddr - boundary;
5488 i = 0;
5491 else
5493 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5494 if (buf == NULL)
5495 return bpaddr;
5496 buf_len = bpaddr - boundary;
5497 i = 0;
5500 /* Scan forwards. Find the last IT instruction before BPADDR. */
5501 last_it = -1;
5502 last_it_count = 0;
5503 while (i < buf_len)
5505 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5506 last_it_count--;
5507 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5509 last_it = i;
5510 if (inst1 & 0x0001)
5511 last_it_count = 4;
5512 else if (inst1 & 0x0002)
5513 last_it_count = 3;
5514 else if (inst1 & 0x0004)
5515 last_it_count = 2;
5516 else
5517 last_it_count = 1;
5519 i += thumb_insn_size (inst1);
5522 xfree (buf);
5524 if (last_it == -1)
5525 /* There wasn't really an IT instruction after all. */
5526 return bpaddr;
5528 if (last_it_count < 1)
5529 /* It was too far away. */
5530 return bpaddr;
5532 /* This really is a trouble spot. Move the breakpoint to the IT
5533 instruction. */
5534 return bpaddr - buf_len + last_it;
5537 /* ARM displaced stepping support.
5539 Generally ARM displaced stepping works as follows:
5541 1. When an instruction is to be single-stepped, it is first decoded by
5542 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5543 Depending on the type of instruction, it is then copied to a scratch
5544 location, possibly in a modified form. The copy_* set of functions
5545 performs such modification, as necessary. A breakpoint is placed after
5546 the modified instruction in the scratch space to return control to GDB.
5547 Note in particular that instructions which modify the PC will no longer
5548 do so after modification.
5550 2. The instruction is single-stepped, by setting the PC to the scratch
5551 location address, and resuming. Control returns to GDB when the
5552 breakpoint is hit.
5554 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5555 function used for the current instruction. This function's job is to
5556 put the CPU/memory state back to what it would have been if the
5557 instruction had been executed unmodified in its original location. */
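/* For illustration (a simplified sketch): suppose the inferior is
   stopped at an ARM-mode "add r2, pc, #4" at address 0x8000. At its
   original location the PC operand would read as 0x8008, so the copy_*
   routine typically pre-loads a scratch register with 0x8008 and
   rewrites the copied instruction to use it; after the single step the
   cleanup_* routine moves the result into r2 and restores the scratch
   register, leaving r2 == 0x800c just as if the instruction had run in
   place. */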
5559 /* NOP instruction (mov r0, r0). */
5560 #define ARM_NOP 0xe1a00000
5561 #define THUMB_NOP 0x4600
5563 /* Helper for register reads for displaced stepping. In particular, this
5564 returns the PC as it would be seen by the instruction at its original
5565 location. */
5567 ULONGEST
5568 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5569 int regno)
5571 ULONGEST ret;
5572 CORE_ADDR from = dsc->insn_addr;
5574 if (regno == ARM_PC_REGNUM)
5576 /* Compute pipeline offset:
5577 - When executing an ARM instruction, PC reads as the address of the
5578 current instruction plus 8.
5579 - When executing a Thumb instruction, PC reads as the address of the
5580 current instruction plus 4. */
5582 if (!dsc->is_thumb)
5583 from += 8;
5584 else
5585 from += 4;
5587 if (debug_displaced)
5588 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5589 (unsigned long) from);
5590 return (ULONGEST) from;
5592 else
5594 regcache_cooked_read_unsigned (regs, regno, &ret);
5595 if (debug_displaced)
5596 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5597 regno, (unsigned long) ret);
5598 return ret;
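/* For example, for an ARM-mode instruction originally at 0x1000,
   displaced_read_reg (regs, dsc, ARM_PC_REGNUM) returns 0x1008; for a
   Thumb instruction at the same address it returns 0x1004. Reads of
   any other register go straight to the regcache. */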
5602 static int
5603 displaced_in_arm_mode (struct regcache *regs)
5605 ULONGEST ps;
5606 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5608 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5610 return (ps & t_bit) == 0;
5613 /* Write to the PC as from a branch instruction. */
5615 static void
5616 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5617 ULONGEST val)
5619 if (!dsc->is_thumb)
5620 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5621 architecture versions < 6. */
5622 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5623 val & ~(ULONGEST) 0x3);
5624 else
5625 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5626 val & ~(ULONGEST) 0x1);
5629 /* Write to the PC as from a branch-exchange instruction. */
5631 static void
5632 bx_write_pc (struct regcache *regs, ULONGEST val)
5634 ULONGEST ps;
5635 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5637 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5639 if ((val & 1) == 1)
5641 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5642 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5644 else if ((val & 2) == 0)
5646 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5647 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5649 else
5651 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5652 mode, align dest to 4 bytes). */
5653 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5654 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5655 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5659 /* Write to the PC as if from a load instruction. */
5661 static void
5662 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5663 ULONGEST val)
5665 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5666 bx_write_pc (regs, val);
5667 else
5668 branch_write_pc (regs, dsc, val);
5671 /* Write to the PC as if from an ALU instruction. */
5673 static void
5674 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5675 ULONGEST val)
5677 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5678 bx_write_pc (regs, val);
5679 else
5680 branch_write_pc (regs, dsc, val);
5683 /* Helper for writing to registers for displaced stepping. Writing to the PC
5684 has varying effects depending on the instruction which does the write:
5685 this is controlled by the WRITE_PC argument. */
5687 void
5688 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5689 int regno, ULONGEST val, enum pc_write_style write_pc)
5691 if (regno == ARM_PC_REGNUM)
5693 if (debug_displaced)
5694 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5695 (unsigned long) val);
5696 switch (write_pc)
5698 case BRANCH_WRITE_PC:
5699 branch_write_pc (regs, dsc, val);
5700 break;
5702 case BX_WRITE_PC:
5703 bx_write_pc (regs, val);
5704 break;
5706 case LOAD_WRITE_PC:
5707 load_write_pc (regs, dsc, val);
5708 break;
5710 case ALU_WRITE_PC:
5711 alu_write_pc (regs, dsc, val);
5712 break;
5714 case CANNOT_WRITE_PC:
5715 warning (_("Instruction wrote to PC in an unexpected way when "
5716 "single-stepping"));
5717 break;
5719 default:
5720 internal_error (__FILE__, __LINE__,
5721 _("Invalid argument to displaced_write_reg"));
5724 dsc->wrote_to_pc = 1;
5726 else
5728 if (debug_displaced)
5729 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5730 regno, (unsigned long) val);
5731 regcache_cooked_write_unsigned (regs, regno, val);
5735 /* This function is used to concisely determine if an instruction INSN
5736 references PC. Register fields of interest in INSN should have the
5737 corresponding fields of BITMASK set to 0b1111. The function
5738 returns 1 if any of these fields in INSN reference the PC
5739 (also 0b1111, r15), else it returns 0. */
5741 static int
5742 insn_references_pc (uint32_t insn, uint32_t bitmask)
5744 uint32_t lowbit = 1;
5746 while (bitmask != 0)
5748 uint32_t mask;
5750 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5753 if (!lowbit)
5754 break;
5756 mask = lowbit * 0xf;
5758 if ((insn & mask) == mask)
5759 return 1;
5761 bitmask &= ~mask;
5764 return 0;
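/* A minimal usage sketch (illustrative only): for ARM data-processing
   instructions the Rn field occupies bits 16-19, so a caller that only
   cares about Rn passes a bitmask with just those bits set. */
#if 0
static void
insn_references_pc_example (void)
{
  /* 0xe08f0001 is "add r0, pc, r1": the Rn field is r15, yields 1. */
  int uses_pc = insn_references_pc (0xe08f0001, 0x000f0000ul);

  /* 0xe0810002 is "add r0, r1, r2": no tested field is r15, yields 0. */
  int no_pc = insn_references_pc (0xe0810002, 0x000f0000ul);
}
#endif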
5767 /* The simplest copy function. Many instructions have the same effect no
5768 matter what address they are executed at: in those cases, use this. */
5770 static int
5771 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5772 const char *iname, struct displaced_step_closure *dsc)
5774 if (debug_displaced)
5775 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5776 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5777 iname);
5779 dsc->modinsn[0] = insn;
5781 return 0;
5784 static int
5785 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5786 uint16_t insn2, const char *iname,
5787 struct displaced_step_closure *dsc)
5789 if (debug_displaced)
5790 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5791 "opcode/class '%s' unmodified\n", insn1, insn2,
5792 iname);
5794 dsc->modinsn[0] = insn1;
5795 dsc->modinsn[1] = insn2;
5796 dsc->numinsns = 2;
5798 return 0;
5801 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
5802 modification. */
5803 static int
5804 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5805 const char *iname,
5806 struct displaced_step_closure *dsc)
5808 if (debug_displaced)
5809 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5810 "opcode/class '%s' unmodified\n", insn,
5811 iname);
5813 dsc->modinsn[0] = insn;
5815 return 0;
5818 /* Preload instructions with immediate offset. */
5820 static void
5821 cleanup_preload (struct gdbarch *gdbarch,
5822 struct regcache *regs, struct displaced_step_closure *dsc)
5824 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5825 if (!dsc->u.preload.immed)
5826 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5829 static void
5830 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5831 struct displaced_step_closure *dsc, unsigned int rn)
5833 ULONGEST rn_val;
5834 /* Preload instructions:
5836 {pli/pld} [rn, #+/-imm]
5837 ->
5838 {pli/pld} [r0, #+/-imm]. */
5840 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5841 rn_val = displaced_read_reg (regs, dsc, rn);
5842 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5843 dsc->u.preload.immed = 1;
5845 dsc->cleanup = &cleanup_preload;
5848 static int
5849 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5850 struct displaced_step_closure *dsc)
5852 unsigned int rn = bits (insn, 16, 19);
5854 if (!insn_references_pc (insn, 0x000f0000ul))
5855 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5857 if (debug_displaced)
5858 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5859 (unsigned long) insn);
5861 dsc->modinsn[0] = insn & 0xfff0ffff;
5863 install_preload (gdbarch, regs, dsc, rn);
5865 return 0;
5868 static int
5869 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5870 struct regcache *regs, struct displaced_step_closure *dsc)
5872 unsigned int rn = bits (insn1, 0, 3);
5873 unsigned int u_bit = bit (insn1, 7);
5874 int imm12 = bits (insn2, 0, 11);
5875 ULONGEST pc_val;
5877 if (rn != ARM_PC_REGNUM)
5878 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5880 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3,
5881 and PLD (literal) Encoding T1. */
5882 if (debug_displaced)
5883 fprintf_unfiltered (gdb_stdlog,
5884 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5885 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5886 imm12);
5888 if (!u_bit)
5889 imm12 = -1 * imm12;
5891 /* Rewrite instruction {pli/pld} PC imm12 into:
5892 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5894 {pli/pld} [r0, r1]
5896 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5898 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5899 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5901 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5903 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5904 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5905 dsc->u.preload.immed = 0;
5907 /* {pli/pld} [r0, r1] */
5908 dsc->modinsn[0] = insn1 & 0xfff0;
5909 dsc->modinsn[1] = 0xf001;
5910 dsc->numinsns = 2;
5912 dsc->cleanup = &cleanup_preload;
5913 return 0;
5916 /* Preload instructions with register offset. */
5918 static void
5919 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5920 struct displaced_step_closure *dsc, unsigned int rn,
5921 unsigned int rm)
5923 ULONGEST rn_val, rm_val;
5925 /* Preload register-offset instructions:
5927 {pli/pld} [rn, rm {, shift}]
5928 ->
5929 {pli/pld} [r0, r1 {, shift}]. */
5931 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5932 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5933 rn_val = displaced_read_reg (regs, dsc, rn);
5934 rm_val = displaced_read_reg (regs, dsc, rm);
5935 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5936 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5937 dsc->u.preload.immed = 0;
5939 dsc->cleanup = &cleanup_preload;
5942 static int
5943 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5944 struct regcache *regs,
5945 struct displaced_step_closure *dsc)
5947 unsigned int rn = bits (insn, 16, 19);
5948 unsigned int rm = bits (insn, 0, 3);
5951 if (!insn_references_pc (insn, 0x000f000ful))
5952 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5954 if (debug_displaced)
5955 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5956 (unsigned long) insn);
5958 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5960 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5961 return 0;
5964 /* Copy/cleanup coprocessor load and store instructions. */
5966 static void
5967 cleanup_copro_load_store (struct gdbarch *gdbarch,
5968 struct regcache *regs,
5969 struct displaced_step_closure *dsc)
5971 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5973 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5975 if (dsc->u.ldst.writeback)
5976 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5979 static void
5980 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5981 struct displaced_step_closure *dsc,
5982 int writeback, unsigned int rn)
5984 ULONGEST rn_val;
5986 /* Coprocessor load/store instructions:
5988 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5989 ->
5990 {stc/stc2} [r0, #+/-imm].
5992 ldc/ldc2 are handled identically. */
5994 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5995 rn_val = displaced_read_reg (regs, dsc, rn);
5996 /* PC should be 4-byte aligned. */
5997 rn_val = rn_val & 0xfffffffc;
5998 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
6000 dsc->u.ldst.writeback = writeback;
6001 dsc->u.ldst.rn = rn;
6003 dsc->cleanup = &cleanup_copro_load_store;
6006 static int
6007 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
6008 struct regcache *regs,
6009 struct displaced_step_closure *dsc)
6011 unsigned int rn = bits (insn, 16, 19);
6013 if (!insn_references_pc (insn, 0x000f0000ul))
6014 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
6016 if (debug_displaced)
6017 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6018 "load/store insn %.8lx\n", (unsigned long) insn);
6020 dsc->modinsn[0] = insn & 0xfff0ffff;
6022 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
6024 return 0;
6027 static int
6028 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
6029 uint16_t insn2, struct regcache *regs,
6030 struct displaced_step_closure *dsc)
6032 unsigned int rn = bits (insn1, 0, 3);
6034 if (rn != ARM_PC_REGNUM)
6035 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6036 "copro load/store", dsc);
6038 if (debug_displaced)
6039 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6040 "load/store insn %.4x%.4x\n", insn1, insn2);
6042 dsc->modinsn[0] = insn1 & 0xfff0;
6043 dsc->modinsn[1] = insn2;
6044 dsc->numinsns = 2;
6046 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
6047 don't support writeback, so pass 0. */
6048 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6050 return 0;
6053 /* Clean up branch instructions (actually perform the branch, by setting
6054 PC). */
6056 static void
6057 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
6058 struct displaced_step_closure *dsc)
6060 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6061 int branch_taken = condition_true (dsc->u.branch.cond, status);
6062 enum pc_write_style write_pc = dsc->u.branch.exchange
6063 ? BX_WRITE_PC : BRANCH_WRITE_PC;
6065 if (!branch_taken)
6066 return;
6068 if (dsc->u.branch.link)
6070 /* The value of LR should be the address of the next insn. In order
6071 not to confuse logic handling a later `bx lr' insn, if the current insn
6072 mode is Thumb, bit 0 of the LR value should be set to 1. */
6073 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
6075 if (dsc->is_thumb)
6076 next_insn_addr |= 0x1;
6078 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
6079 CANNOT_WRITE_PC);
6082 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
6085 /* Copy B/BL/BLX instructions with immediate destinations. */
6087 static void
6088 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6089 struct displaced_step_closure *dsc,
6090 unsigned int cond, int exchange, int link, long offset)
6092 /* Implement "BL<cond> <label>" as:
6094 Preparation: cond <- instruction condition
6095 Insn: mov r0, r0 (nop)
6096 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6098 B<cond> similar, but don't set r14 in cleanup. */
6100 dsc->u.branch.cond = cond;
6101 dsc->u.branch.link = link;
6102 dsc->u.branch.exchange = exchange;
6104 dsc->u.branch.dest = dsc->insn_addr;
6105 if (link && exchange)
6106 /* For BLX, offset is computed from the Align (PC, 4). */
6107 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
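/* The branch target is relative to the PC value the insn sees: the insn
   address + 4 in Thumb state, or + 8 in ARM state, plus the offset.  */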
6109 if (dsc->is_thumb)
6110 dsc->u.branch.dest += 4 + offset;
6111 else
6112 dsc->u.branch.dest += 8 + offset;
6114 dsc->cleanup = &cleanup_branch;
6116 static int
6117 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6118 struct regcache *regs, struct displaced_step_closure *dsc)
6120 unsigned int cond = bits (insn, 28, 31);
6121 int exchange = (cond == 0xf);
6122 int link = exchange || bit (insn, 24);
6123 long offset;
6125 if (debug_displaced)
6126 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6127 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6128 (unsigned long) insn);
6129 if (exchange)
6130 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6131 then arrange the switch into Thumb mode. */
6132 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6133 else
6134 offset = bits (insn, 0, 23) << 2;
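/* Sign-extend the 26-bit branch offset (bit 25 is the sign bit).  */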
6136 if (bit (offset, 25))
6137 offset = offset | ~0x3ffffff;
6139 dsc->modinsn[0] = ARM_NOP;
6141 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6142 return 0;
6145 static int
6146 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6147 uint16_t insn2, struct regcache *regs,
6148 struct displaced_step_closure *dsc)
6150 int link = bit (insn2, 14);
6151 int exchange = link && !bit (insn2, 12);
6152 int cond = INST_AL;
6153 long offset = 0;
6154 int j1 = bit (insn2, 13);
6155 int j2 = bit (insn2, 11);
6156 int s = sbits (insn1, 10, 10);
6157 int i1 = !(j1 ^ bit (insn1, 10));
6158 int i2 = !(j2 ^ bit (insn1, 10));
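/* I1 = NOT(J1 XOR S) and I2 = NOT(J2 XOR S), as in the Thumb-2 branch
   encodings.  S is read sign-extended (via sbits) so that ORing it into
   the high bits also sign-extends the assembled offset.  */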
6160 if (!link && !exchange) /* B */
6162 offset = (bits (insn2, 0, 10) << 1);
6163 if (bit (insn2, 12)) /* Encoding T4 */
6165 offset |= (bits (insn1, 0, 9) << 12)
6166 | (i2 << 22)
6167 | (i1 << 23)
6168 | (s << 24);
6169 cond = INST_AL;
6171 else /* Encoding T3 */
6173 offset |= (bits (insn1, 0, 5) << 12)
6174 | (j1 << 18)
6175 | (j2 << 19)
6176 | (s << 20);
6177 cond = bits (insn1, 6, 9);
6180 else
6182 offset = (bits (insn1, 0, 9) << 12);
6183 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6184 offset |= exchange ?
6185 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6188 if (debug_displaced)
6189 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
6190 "%.4x %.4x with offset %.8lx\n",
6191 link ? (exchange) ? "blx" : "bl" : "b",
6192 insn1, insn2, offset);
6194 dsc->modinsn[0] = THUMB_NOP;
6196 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6197 return 0;
6200 /* Copy B Thumb instructions. */
6201 static int
6202 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
6203 struct displaced_step_closure *dsc)
6205 unsigned int cond = 0;
6206 int offset = 0;
6207 unsigned short bit_12_15 = bits (insn, 12, 15);
6208 CORE_ADDR from = dsc->insn_addr;
6210 if (bit_12_15 == 0xd)
6212 /* offset = SignExtend (imm8:0, 32) */
6213 offset = sbits ((insn << 1), 0, 8);
6214 cond = bits (insn, 8, 11);
6216 else if (bit_12_15 == 0xe) /* Encoding T2 */
6218 offset = sbits ((insn << 1), 0, 11);
6219 cond = INST_AL;
6222 if (debug_displaced)
6223 fprintf_unfiltered (gdb_stdlog,
6224 "displaced: copying b immediate insn %.4x "
6225 "with offset %d\n", insn, offset);
6227 dsc->u.branch.cond = cond;
6228 dsc->u.branch.link = 0;
6229 dsc->u.branch.exchange = 0;
6230 dsc->u.branch.dest = from + 4 + offset;
6232 dsc->modinsn[0] = THUMB_NOP;
6234 dsc->cleanup = &cleanup_branch;
6236 return 0;
6239 /* Copy BX/BLX with register-specified destinations. */
6241 static void
6242 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6243 struct displaced_step_closure *dsc, int link,
6244 unsigned int cond, unsigned int rm)
6246 /* Implement "{BX,BLX}<cond> <reg>" as:
6248 Preparation: cond <- instruction condition
6249 Insn: mov r0, r0 (nop)
6250 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6252 Don't set r14 in cleanup for BX. */
6254 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6256 dsc->u.branch.cond = cond;
6257 dsc->u.branch.link = link;
6259 dsc->u.branch.exchange = 1;
6261 dsc->cleanup = &cleanup_branch;
6264 static int
6265 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6266 struct regcache *regs, struct displaced_step_closure *dsc)
6268 unsigned int cond = bits (insn, 28, 31);
6269 /* BX: x12xxx1x
6270 BLX: x12xxx3x. */
6271 int link = bit (insn, 5);
6272 unsigned int rm = bits (insn, 0, 3);
6274 if (debug_displaced)
6275 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6276 (unsigned long) insn);
6278 dsc->modinsn[0] = ARM_NOP;
6280 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6281 return 0;
6284 static int
6285 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6286 struct regcache *regs,
6287 struct displaced_step_closure *dsc)
6289 int link = bit (insn, 7);
6290 unsigned int rm = bits (insn, 3, 6);
6292 if (debug_displaced)
6293 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6294 (unsigned short) insn);
6296 dsc->modinsn[0] = THUMB_NOP;
6298 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6300 return 0;
6304 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6306 static void
6307 cleanup_alu_imm (struct gdbarch *gdbarch,
6308 struct regcache *regs, struct displaced_step_closure *dsc)
6310 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6311 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6312 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6313 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6316 static int
6317 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6318 struct displaced_step_closure *dsc)
6320 unsigned int rn = bits (insn, 16, 19);
6321 unsigned int rd = bits (insn, 12, 15);
6322 unsigned int op = bits (insn, 21, 24);
6323 int is_mov = (op == 0xd);
6324 ULONGEST rd_val, rn_val;
6326 if (!insn_references_pc (insn, 0x000ff000ul))
6327 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6329 if (debug_displaced)
6330 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6331 "%.8lx\n", is_mov ? "move" : "ALU",
6332 (unsigned long) insn);
6334 /* Instruction is of form:
6336 <op><cond> rd, [rn,] #imm
6338 Rewrite as:
6340 Preparation: tmp1, tmp2 <- r0, r1;
6341 r0, r1 <- rd, rn
6342 Insn: <op><cond> r0, r1, #imm
6343 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6346 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6347 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6348 rn_val = displaced_read_reg (regs, dsc, rn);
6349 rd_val = displaced_read_reg (regs, dsc, rd);
6350 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6351 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6352 dsc->rd = rd;
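/* Rewrite Rd to r0, and for non-MOV insns also rewrite Rn to r1 (MOV has
   no Rn field).  */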
6354 if (is_mov)
6355 dsc->modinsn[0] = insn & 0xfff00fff;
6356 else
6357 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6359 dsc->cleanup = &cleanup_alu_imm;
6361 return 0;
6364 static int
6365 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6366 uint16_t insn2, struct regcache *regs,
6367 struct displaced_step_closure *dsc)
6369 unsigned int op = bits (insn1, 5, 8);
6370 unsigned int rn, rm, rd;
6371 ULONGEST rd_val, rn_val;
6373 rn = bits (insn1, 0, 3); /* Rn */
6374 rm = bits (insn2, 0, 3); /* Rm */
6375 rd = bits (insn2, 8, 11); /* Rd */
6377 /* This routine is only called for instruction MOV. */
6378 gdb_assert (op == 0x2 && rn == 0xf);
6380 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6381 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6383 if (debug_displaced)
6384 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6385 "ALU", insn1, insn2);
6387 /* Instruction is of form:
6389 <op><cond> rd, [rn,] #imm
6391 Rewrite as:
6393 Preparation: tmp1, tmp2 <- r0, r1;
6394 r0, r1 <- rd, rn
6395 Insn: <op><cond> r0, r1, #imm
6396 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6399 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6400 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6401 rn_val = displaced_read_reg (regs, dsc, rn);
6402 rd_val = displaced_read_reg (regs, dsc, rd);
6403 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6404 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6405 dsc->rd = rd;
6407 dsc->modinsn[0] = insn1;
6408 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6409 dsc->numinsns = 2;
6411 dsc->cleanup = &cleanup_alu_imm;
6413 return 0;
6416 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6418 static void
6419 cleanup_alu_reg (struct gdbarch *gdbarch,
6420 struct regcache *regs, struct displaced_step_closure *dsc)
6422 ULONGEST rd_val;
6423 int i;
6425 rd_val = displaced_read_reg (regs, dsc, 0);
6427 for (i = 0; i < 3; i++)
6428 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6430 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6433 static void
6434 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6435 struct displaced_step_closure *dsc,
6436 unsigned int rd, unsigned int rn, unsigned int rm)
6438 ULONGEST rd_val, rn_val, rm_val;
6440 /* Instruction is of form:
6442 <op><cond> rd, [rn,] rm [, <shift>]
6444 Rewrite as:
6446 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6447 r0, r1, r2 <- rd, rn, rm
6448 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
6449 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6452 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6453 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6454 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6455 rd_val = displaced_read_reg (regs, dsc, rd);
6456 rn_val = displaced_read_reg (regs, dsc, rn);
6457 rm_val = displaced_read_reg (regs, dsc, rm);
6458 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6459 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6460 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6461 dsc->rd = rd;
6463 dsc->cleanup = &cleanup_alu_reg;
6466 static int
6467 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6468 struct displaced_step_closure *dsc)
6470 unsigned int op = bits (insn, 21, 24);
6471 int is_mov = (op == 0xd);
6473 if (!insn_references_pc (insn, 0x000ff00ful))
6474 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6476 if (debug_displaced)
6477 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6478 is_mov ? "move" : "ALU", (unsigned long) insn);
6480 if (is_mov)
6481 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6482 else
6483 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6485 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6486 bits (insn, 0, 3));
6487 return 0;
6490 static int
6491 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6492 struct regcache *regs,
6493 struct displaced_step_closure *dsc)
6495 unsigned rm, rd;
6497 rm = bits (insn, 3, 6);
6498 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6500 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
6501 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6503 if (debug_displaced)
6504 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
6505 (unsigned short) insn);
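/* Rewrite the insn to operate on r0 (Rd) and r2 (Rm).  */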
6507 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
6509 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
6511 return 0;
6514 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6516 static void
6517 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6518 struct regcache *regs,
6519 struct displaced_step_closure *dsc)
6521 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6522 int i;
6524 for (i = 0; i < 4; i++)
6525 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6527 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6530 static void
6531 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6532 struct displaced_step_closure *dsc,
6533 unsigned int rd, unsigned int rn, unsigned int rm,
6534 unsigned rs)
6536 int i;
6537 ULONGEST rd_val, rn_val, rm_val, rs_val;
6539 /* Instruction is of form:
6541 <op><cond> rd, [rn,] rm, <shift> rs
6543 Rewrite as:
6545 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6546 r0, r1, r2, r3 <- rd, rn, rm, rs
6547 Insn: <op><cond> r0, r1, r2, <shift> r3
6548 Cleanup: tmp5 <- r0
6549 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6550 rd <- tmp5
6553 for (i = 0; i < 4; i++)
6554 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6556 rd_val = displaced_read_reg (regs, dsc, rd);
6557 rn_val = displaced_read_reg (regs, dsc, rn);
6558 rm_val = displaced_read_reg (regs, dsc, rm);
6559 rs_val = displaced_read_reg (regs, dsc, rs);
6560 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6561 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6562 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6563 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6564 dsc->rd = rd;
6565 dsc->cleanup = &cleanup_alu_shifted_reg;
6568 static int
6569 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6570 struct regcache *regs,
6571 struct displaced_step_closure *dsc)
6573 unsigned int op = bits (insn, 21, 24);
6574 int is_mov = (op == 0xd);
6575 unsigned int rd, rn, rm, rs;
6577 if (!insn_references_pc (insn, 0x000fff0ful))
6578 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6580 if (debug_displaced)
6581 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6582 "%.8lx\n", is_mov ? "move" : "ALU",
6583 (unsigned long) insn);
6585 rn = bits (insn, 16, 19);
6586 rm = bits (insn, 0, 3);
6587 rs = bits (insn, 8, 11);
6588 rd = bits (insn, 12, 15);
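/* Rewrite Rd to r0, Rm to r2 and Rs to r3 (and Rn to r1 for non-MOV
   insns).  */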
6590 if (is_mov)
6591 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6592 else
6593 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6595 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6597 return 0;
6600 /* Clean up load instructions. */
6602 static void
6603 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6604 struct displaced_step_closure *dsc)
6606 ULONGEST rt_val, rt_val2 = 0, rn_val;
6608 rt_val = displaced_read_reg (regs, dsc, 0);
6609 if (dsc->u.ldst.xfersize == 8)
6610 rt_val2 = displaced_read_reg (regs, dsc, 1);
6611 rn_val = displaced_read_reg (regs, dsc, 2);
6613 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6614 if (dsc->u.ldst.xfersize > 4)
6615 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6616 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6617 if (!dsc->u.ldst.immed)
6618 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6620 /* Handle register writeback. */
6621 if (dsc->u.ldst.writeback)
6622 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6623 /* Put result in right place. */
6624 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6625 if (dsc->u.ldst.xfersize == 8)
6626 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6629 /* Clean up store instructions. */
6631 static void
6632 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6633 struct displaced_step_closure *dsc)
6635 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6637 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6638 if (dsc->u.ldst.xfersize > 4)
6639 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6640 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6641 if (!dsc->u.ldst.immed)
6642 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6643 if (!dsc->u.ldst.restore_r4)
6644 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6646 /* Writeback. */
6647 if (dsc->u.ldst.writeback)
6648 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6651 /* Copy "extra" load/store instructions. These are halfword/doubleword
6652 transfers, which have a different encoding to byte/word transfers. */
6654 static int
6655 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6656 struct regcache *regs, struct displaced_step_closure *dsc)
6658 unsigned int op1 = bits (insn, 20, 24);
6659 unsigned int op2 = bits (insn, 5, 6);
6660 unsigned int rt = bits (insn, 12, 15);
6661 unsigned int rn = bits (insn, 16, 19);
6662 unsigned int rm = bits (insn, 0, 3);
6663 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6664 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6665 int immed = (op1 & 0x4) != 0;
6666 int opcode;
6667 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6669 if (!insn_references_pc (insn, 0x000ff00ful))
6670 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6672 if (debug_displaced)
6673 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6674 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6675 (unsigned long) insn);
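/* Combine op2 with bits 20 and 22 of the insn to form an index into the
   load[] and bytesize[] tables above.  */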
6677 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6679 if (opcode < 0)
6680 internal_error (__FILE__, __LINE__,
6681 _("copy_extra_ld_st: instruction decode error"));
6683 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6684 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6685 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6686 if (!immed)
6687 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6689 rt_val = displaced_read_reg (regs, dsc, rt);
6690 if (bytesize[opcode] == 8)
6691 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6692 rn_val = displaced_read_reg (regs, dsc, rn);
6693 if (!immed)
6694 rm_val = displaced_read_reg (regs, dsc, rm);
6696 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6697 if (bytesize[opcode] == 8)
6698 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6699 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6700 if (!immed)
6701 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6703 dsc->rd = rt;
6704 dsc->u.ldst.xfersize = bytesize[opcode];
6705 dsc->u.ldst.rn = rn;
6706 dsc->u.ldst.immed = immed;
6707 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6708 dsc->u.ldst.restore_r4 = 0;
6710 if (immed)
6711 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6712 ->
6713 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6714 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6715 else
6716 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6717 ->
6718 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6719 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6721 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6723 return 0;
6726 /* Copy byte/halfword/word loads and stores. */
6728 static void
6729 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6730 struct displaced_step_closure *dsc, int load,
6731 int immed, int writeback, int size, int usermode,
6732 int rt, int rm, int rn)
6734 ULONGEST rt_val, rn_val, rm_val = 0;
6736 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6737 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6738 if (!immed)
6739 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6740 if (!load)
6741 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6743 rt_val = displaced_read_reg (regs, dsc, rt);
6744 rn_val = displaced_read_reg (regs, dsc, rn);
6745 if (!immed)
6746 rm_val = displaced_read_reg (regs, dsc, rm);
6748 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6749 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6750 if (!immed)
6751 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6752 dsc->rd = rt;
6753 dsc->u.ldst.xfersize = size;
6754 dsc->u.ldst.rn = rn;
6755 dsc->u.ldst.immed = immed;
6756 dsc->u.ldst.writeback = writeback;
6758 /* To write PC we can do:
6760 Before this sequence of instructions:
6761 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
6762 r2 is the Rn value obtained from displaced_read_reg.
6764 Insn1: push {pc} Write address of STR instruction + offset on stack
6765 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6766 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6767 = addr(Insn1) + offset - addr(Insn3) - 8
6768 = offset - 16
6769 Insn4: add r4, r4, #8 r4 = offset - 8
6770 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6771 = from + offset
6772 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6774 Otherwise we don't know what value to write for PC, since the offset is
6775 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6776 of this can be found in Section "Saving from r15" in
6777 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6779 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6783 static int
6784 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6785 uint16_t insn2, struct regcache *regs,
6786 struct displaced_step_closure *dsc, int size)
6788 unsigned int u_bit = bit (insn1, 7);
6789 unsigned int rt = bits (insn2, 12, 15);
6790 int imm12 = bits (insn2, 0, 11);
6791 ULONGEST pc_val;
6793 if (debug_displaced)
6794 fprintf_unfiltered (gdb_stdlog,
6795 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6796 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6797 imm12);
6799 if (!u_bit)
6800 imm12 = -1 * imm12;
6802 /* Rewrite instruction LDR Rt imm12 into:
6804 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6806 LDR R0, R2, R3,
6808 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6811 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6812 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6813 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6815 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
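/* The literal is addressed relative to Align (PC, 4).  */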
6817 pc_val = pc_val & 0xfffffffc;
6819 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6820 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6822 dsc->rd = rt;
6824 dsc->u.ldst.xfersize = size;
6825 dsc->u.ldst.immed = 0;
6826 dsc->u.ldst.writeback = 0;
6827 dsc->u.ldst.restore_r4 = 0;
6829 /* LDR R0, R2, R3 */
6830 dsc->modinsn[0] = 0xf852;
6831 dsc->modinsn[1] = 0x3;
6832 dsc->numinsns = 2;
6834 dsc->cleanup = &cleanup_load;
6836 return 0;
6839 static int
6840 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6841 uint16_t insn2, struct regcache *regs,
6842 struct displaced_step_closure *dsc,
6843 int writeback, int immed)
6845 unsigned int rt = bits (insn2, 12, 15);
6846 unsigned int rn = bits (insn1, 0, 3);
6847 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6848 /* In LDR (register), there is also a register Rm, which is not allowed to
6849 be PC, so we don't have to check it. */
6851 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6852 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6853 dsc);
6855 if (debug_displaced)
6856 fprintf_unfiltered (gdb_stdlog,
6857 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6858 rt, rn, insn1, insn2);
6860 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6861 0, rt, rm, rn);
6863 dsc->u.ldst.restore_r4 = 0;
6865 if (immed)
6866 /* ldr[b]<cond> rt, [rn, #imm], etc.
6867 ->
6868 ldr[b]<cond> r0, [r2, #imm]. */
6870 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6871 dsc->modinsn[1] = insn2 & 0x0fff;
6873 else
6874 /* ldr[b]<cond> rt, [rn, rm], etc.
6875 ->
6876 ldr[b]<cond> r0, [r2, r3]. */
6878 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6879 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6882 dsc->numinsns = 2;
6884 return 0;
6888 static int
6889 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6890 struct regcache *regs,
6891 struct displaced_step_closure *dsc,
6892 int load, int size, int usermode)
6894 int immed = !bit (insn, 25);
6895 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6896 unsigned int rt = bits (insn, 12, 15);
6897 unsigned int rn = bits (insn, 16, 19);
6898 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6900 if (!insn_references_pc (insn, 0x000ff00ful))
6901 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6903 if (debug_displaced)
6904 fprintf_unfiltered (gdb_stdlog,
6905 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6906 load ? (size == 1 ? "ldrb" : "ldr")
6907 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6908 rt, rn,
6909 (unsigned long) insn);
6911 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6912 usermode, rt, rm, rn);
6914 if (load || rt != ARM_PC_REGNUM)
6916 dsc->u.ldst.restore_r4 = 0;
6918 if (immed)
6919 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6920 ->
6921 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6922 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6923 else
6924 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6925 ->
6926 {ldr,str}[b]<cond> r0, [r2, r3]. */
6927 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6929 else
6931 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6932 dsc->u.ldst.restore_r4 = 1;
6933 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6934 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6935 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6936 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6937 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6939 /* As above. */
6940 if (immed)
6941 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6942 else
6943 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6945 dsc->numinsns = 6;
6948 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6950 return 0;
6953 /* Cleanup LDM instructions with fully-populated register list. This is an
6954 unfortunate corner case: it's impossible to implement correctly by modifying
6955 the instruction. The issue is as follows: we have an instruction,
6957 ldm rN, {r0-r15}
6959 which we must rewrite to avoid loading PC. A possible solution would be to
6960 do the load in two halves, something like (with suitable cleanup
6961 afterwards):
6963 mov r8, rN
6964 ldm[id][ab] r8!, {r0-r7}
6965 str r7, <temp>
6966 ldm[id][ab] r8, {r7-r14}
6967 <bkpt>
6969 but at present there's no suitable place for <temp>, since the scratch space
6970 is overwritten before the cleanup routine is called. For now, we simply
6971 emulate the instruction. */
6973 static void
6974 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6975 struct displaced_step_closure *dsc)
6977 int inc = dsc->u.block.increment;
6978 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6979 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6980 uint32_t regmask = dsc->u.block.regmask;
6981 int regno = inc ? 0 : 15;
6982 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6983 int exception_return = dsc->u.block.load && dsc->u.block.user
6984 && (regmask & 0x8000) != 0;
6985 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6986 int do_transfer = condition_true (dsc->u.block.cond, status);
6987 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6989 if (!do_transfer)
6990 return;
6992 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6993 sensible we can do here. Complain loudly. */
6994 if (exception_return)
6995 error (_("Cannot single-step exception return"));
6997 /* We don't handle any stores here for now. */
6998 gdb_assert (dsc->u.block.load != 0);
7000 if (debug_displaced)
7001 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
7002 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
7003 dsc->u.block.increment ? "inc" : "dec",
7004 dsc->u.block.before ? "before" : "after");
7006 while (regmask)
7008 uint32_t memword;
7010 if (inc)
7011 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
7012 regno++;
7013 else
7014 while (regno >= 0 && (regmask & (1 << regno)) == 0)
7015 regno--;
7017 xfer_addr += bump_before;
7019 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
7020 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
7022 xfer_addr += bump_after;
7024 regmask &= ~(1 << regno);
7027 if (dsc->u.block.writeback)
7028 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
7029 CANNOT_WRITE_PC);
7032 /* Clean up an STM which included the PC in the register list. */
7034 static void
7035 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
7036 struct displaced_step_closure *dsc)
7038 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7039 int store_executed = condition_true (dsc->u.block.cond, status);
7040 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
7041 CORE_ADDR stm_insn_addr;
7042 uint32_t pc_val;
7043 long offset;
7044 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7046 /* If condition code fails, there's nothing else to do. */
7047 if (!store_executed)
7048 return;
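/* Work out the address at which the out-of-line STM stored the PC, from
   the transfer direction and the before/after flag.  */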
7050 if (dsc->u.block.increment)
7052 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
7054 if (dsc->u.block.before)
7055 pc_stored_at += 4;
7057 else
7059 pc_stored_at = dsc->u.block.xfer_addr;
7061 if (dsc->u.block.before)
7062 pc_stored_at -= 4;
7065 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
7066 stm_insn_addr = dsc->scratch_base;
7067 offset = pc_val - stm_insn_addr;
7069 if (debug_displaced)
7070 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
7071 "STM instruction\n", offset);
7073 /* Rewrite the stored PC to the proper value for the non-displaced original
7074 instruction. */
7075 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
7076 dsc->insn_addr + offset);
7079 /* Clean up an LDM which includes the PC in the register list. We clumped all
7080 the registers in the transferred list into a contiguous range r0...rX (to
7081 avoid loading PC directly and losing control of the debugged program), so we
7082 must undo that here. */
7084 static void
7085 cleanup_block_load_pc (struct gdbarch *gdbarch,
7086 struct regcache *regs,
7087 struct displaced_step_closure *dsc)
7089 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7090 int load_executed = condition_true (dsc->u.block.cond, status);
7091 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
7092 unsigned int regs_loaded = bitcount (mask);
7093 unsigned int num_to_shuffle = regs_loaded, clobbered;
7095 /* The method employed here will fail if the register list is fully populated
7096 (we need to avoid loading PC directly). */
7097 gdb_assert (num_to_shuffle < 16);
7099 if (!load_executed)
7100 return;
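/* The rewritten LDM loaded the values into the contiguous range
   r0..r(N-1), so those registers count as clobbered until each value has
   been moved to its proper place or the register restored below.  */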
7102 clobbered = (1 << num_to_shuffle) - 1;
7104 while (num_to_shuffle > 0)
7106 if ((mask & (1 << write_reg)) != 0)
7108 unsigned int read_reg = num_to_shuffle - 1;
7110 if (read_reg != write_reg)
7112 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7113 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7114 if (debug_displaced)
7115 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
7116 "loaded register r%d to r%d\n"), read_reg,
7117 write_reg);
7119 else if (debug_displaced)
7120 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
7121 "r%d already in the right place\n"),
7122 write_reg);
7124 clobbered &= ~(1 << write_reg);
7126 num_to_shuffle--;
7129 write_reg--;
7132 /* Restore any registers we scribbled over. */
7133 for (write_reg = 0; clobbered != 0; write_reg++)
7135 if ((clobbered & (1 << write_reg)) != 0)
7137 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7138 CANNOT_WRITE_PC);
7139 if (debug_displaced)
7140 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
7141 "clobbered register r%d\n"), write_reg);
7142 clobbered &= ~(1 << write_reg);
7146 /* Perform register writeback manually. */
7147 if (dsc->u.block.writeback)
7149 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7151 if (dsc->u.block.increment)
7152 new_rn_val += regs_loaded * 4;
7153 else
7154 new_rn_val -= regs_loaded * 4;
7156 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7157 CANNOT_WRITE_PC);
7161 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7162 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7164 static int
7165 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7166 struct regcache *regs,
7167 struct displaced_step_closure *dsc)
7169 int load = bit (insn, 20);
7170 int user = bit (insn, 22);
7171 int increment = bit (insn, 23);
7172 int before = bit (insn, 24);
7173 int writeback = bit (insn, 21);
7174 int rn = bits (insn, 16, 19);
7176 /* Block transfers which don't mention PC can be run directly
7177 out-of-line. */
7178 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7179 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7181 if (rn == ARM_PC_REGNUM)
7183 warning (_("displaced: Unpredictable LDM or STM with "
7184 "base register r15"));
7185 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7188 if (debug_displaced)
7189 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7190 "%.8lx\n", (unsigned long) insn);
7192 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7193 dsc->u.block.rn = rn;
7195 dsc->u.block.load = load;
7196 dsc->u.block.user = user;
7197 dsc->u.block.increment = increment;
7198 dsc->u.block.before = before;
7199 dsc->u.block.writeback = writeback;
7200 dsc->u.block.cond = bits (insn, 28, 31);
7202 dsc->u.block.regmask = insn & 0xffff;
7204 if (load)
7206 if ((insn & 0xffff) == 0xffff)
7208 /* LDM with a fully-populated register list. This case is
7209 particularly tricky. Implement for now by fully emulating the
7210 instruction (which might not behave perfectly in all cases, but
7211 these instructions should be rare enough for that not to matter
7212 too much). */
7213 dsc->modinsn[0] = ARM_NOP;
7215 dsc->cleanup = &cleanup_block_load_all;
7217 else
7219 /* LDM of a list of registers which includes PC. Implement by
7220 rewriting the list of registers to be transferred into a
7221 contiguous chunk r0...rX before doing the transfer, then shuffling
7222 registers into the correct places in the cleanup routine. */
7223 unsigned int regmask = insn & 0xffff;
7224 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7225 unsigned int to = 0, from = 0, i, new_rn;
7227 for (i = 0; i < num_in_list; i++)
7228 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7230 /* Writeback makes things complicated. We need to avoid clobbering
7231 the base register with one of the registers in our modified
7232 register list, but just using a different register can't work in
7233 all cases, e.g.:
7235 ldm r14!, {r0-r13,pc}
7237 which would need to be rewritten as:
7239 ldm rN!, {r0-r14}
7241 but that can't work, because there's no free register for N.
7243 Solve this by turning off the writeback bit, and emulating
7244 writeback manually in the cleanup routine. */
7246 if (writeback)
7247 insn &= ~(1 << 21);
7249 new_regmask = (1 << num_in_list) - 1;
7251 if (debug_displaced)
7252 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7253 "{..., pc}: original reg list %.4x, modified "
7254 "list %.4x\n"), rn, writeback ? "!" : "",
7255 (int) insn & 0xffff, new_regmask);
7257 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7259 dsc->cleanup = &cleanup_block_load_pc;
7262 else
7264 /* STM of a list of registers which includes PC. Run the instruction
7265 as-is, but out of line: this will store the wrong value for the PC,
7266 so we must manually fix up the memory in the cleanup routine.
7267 Doing things this way has the advantage that we can auto-detect
7268 the offset of the PC write (which is architecture-dependent) in
7269 the cleanup routine. */
7270 dsc->modinsn[0] = insn;
7272 dsc->cleanup = &cleanup_block_store_pc;
7275 return 0;
7278 static int
7279 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7280 struct regcache *regs,
7281 struct displaced_step_closure *dsc)
7283 int rn = bits (insn1, 0, 3);
7284 int load = bit (insn1, 4);
7285 int writeback = bit (insn1, 5);
7287 /* Block transfers which don't mention PC can be run directly
7288 out-of-line. */
7289 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7290 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7292 if (rn == ARM_PC_REGNUM)
7294 warning (_("displaced: Unpredictable LDM or STM with "
7295 "base register r15"));
7296 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7297 "unpredictable ldm/stm", dsc);
7300 if (debug_displaced)
7301 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7302 "%.4x%.4x\n", insn1, insn2);
7304 /* Clear bit 13, since it should always be zero. */
7305 dsc->u.block.regmask = (insn2 & 0xdfff);
7306 dsc->u.block.rn = rn;
7308 dsc->u.block.load = load;
7309 dsc->u.block.user = 0;
7310 dsc->u.block.increment = bit (insn1, 7);
7311 dsc->u.block.before = bit (insn1, 8);
7312 dsc->u.block.writeback = writeback;
7313 dsc->u.block.cond = INST_AL;
7314 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7316 if (load)
7318 if (dsc->u.block.regmask == 0xffff)
7320 /* This branch can never be reached. */
7321 gdb_assert (0);
7323 else
7325 unsigned int regmask = dsc->u.block.regmask;
7326 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7327 unsigned int to = 0, from = 0, i, new_rn;
7329 for (i = 0; i < num_in_list; i++)
7330 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
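/* As with the ARM LDM above, turn off the writeback bit and emulate
   writeback manually in the cleanup routine.  */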
7332 if (writeback)
7333 insn1 &= ~(1 << 5);
7335 new_regmask = (1 << num_in_list) - 1;
7337 if (debug_displaced)
7338 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7339 "{..., pc}: original reg list %.4x, modified "
7340 "list %.4x\n"), rn, writeback ? "!" : "",
7341 (int) dsc->u.block.regmask, new_regmask);
7343 dsc->modinsn[0] = insn1;
7344 dsc->modinsn[1] = (new_regmask & 0xffff);
7345 dsc->numinsns = 2;
7347 dsc->cleanup = &cleanup_block_load_pc;
7350 else
7352 dsc->modinsn[0] = insn1;
7353 dsc->modinsn[1] = insn2;
7354 dsc->numinsns = 2;
7355 dsc->cleanup = &cleanup_block_store_pc;
7357 return 0;
7360 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7361 for Linux, where some SVC instructions must be treated specially. */
7363 static void
7364 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7365 struct displaced_step_closure *dsc)
7367 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7369 if (debug_displaced)
7370 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7371 "%.8lx\n", (unsigned long) resume_addr);
7373 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7377 /* Common copy routine for the svc instruction. */
7379 static int
7380 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7381 struct displaced_step_closure *dsc)
7383 /* Preparation: none.
7384 Insn: unmodified svc.
7385 Cleanup: pc <- insn_addr + insn_size. */
7387 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7388 instruction. */
7389 dsc->wrote_to_pc = 1;
7391 /* Allow OS-specific code to override SVC handling. */
7392 if (dsc->u.svc.copy_svc_os)
7393 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7394 else
7396 dsc->cleanup = &cleanup_svc;
7397 return 0;
7401 static int
7402 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7403 struct regcache *regs, struct displaced_step_closure *dsc)
7406 if (debug_displaced)
7407 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7408 (unsigned long) insn);
7410 dsc->modinsn[0] = insn;
7412 return install_svc (gdbarch, regs, dsc);
7415 static int
7416 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7417 struct regcache *regs, struct displaced_step_closure *dsc)
7420 if (debug_displaced)
7421 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7422 insn);
7424 dsc->modinsn[0] = insn;
7426 return install_svc (gdbarch, regs, dsc);
7429 /* Copy undefined instructions. */
7431 static int
7432 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7433 struct displaced_step_closure *dsc)
7435 if (debug_displaced)
7436 fprintf_unfiltered (gdb_stdlog,
7437 "displaced: copying undefined insn %.8lx\n",
7438 (unsigned long) insn);
7440 dsc->modinsn[0] = insn;
7442 return 0;
7445 static int
7446 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7447 struct displaced_step_closure *dsc)
7450 if (debug_displaced)
7451 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7452 "%.4x %.4x\n", (unsigned short) insn1,
7453 (unsigned short) insn2);
7455 dsc->modinsn[0] = insn1;
7456 dsc->modinsn[1] = insn2;
7457 dsc->numinsns = 2;
7459 return 0;
7462 /* Copy unpredictable instructions. */
7464 static int
7465 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7466 struct displaced_step_closure *dsc)
7468 if (debug_displaced)
7469 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7470 "%.8lx\n", (unsigned long) insn);
7472 dsc->modinsn[0] = insn;
7474 return 0;
7477 /* The decode_* functions are instruction decoding helpers. They mostly follow
7478 the presentation in the ARM ARM. */
7480 static int
7481 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7482 struct regcache *regs,
7483 struct displaced_step_closure *dsc)
7485 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7486 unsigned int rn = bits (insn, 16, 19);
7488 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7489 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7490 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
7491 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7492 else if ((op1 & 0x60) == 0x20)
7493 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7494 else if ((op1 & 0x71) == 0x40)
7495 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7496 dsc);
7497 else if ((op1 & 0x77) == 0x41)
7498 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7499 else if ((op1 & 0x77) == 0x45)
7500 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7501 else if ((op1 & 0x77) == 0x51)
7503 if (rn != 0xf)
7504 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7505 else
7506 return arm_copy_unpred (gdbarch, insn, dsc);
7508 else if ((op1 & 0x77) == 0x55)
7509 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7510 else if (op1 == 0x57)
7511 switch (op2)
7513 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7514 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7515 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7516 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7517 default: return arm_copy_unpred (gdbarch, insn, dsc);
7519 else if ((op1 & 0x63) == 0x43)
7520 return arm_copy_unpred (gdbarch, insn, dsc);
7521 else if ((op2 & 0x1) == 0x0)
7522 switch (op1 & ~0x80)
7524 case 0x61:
7525 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7526 case 0x65:
7527 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7528 case 0x71: case 0x75:
7529 /* pld/pldw reg. */
7530 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7531 case 0x63: case 0x67: case 0x73: case 0x77:
7532 return arm_copy_unpred (gdbarch, insn, dsc);
7533 default:
7534 return arm_copy_undef (gdbarch, insn, dsc);
7536 else
7537 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7540 static int
7541 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7542 struct regcache *regs,
7543 struct displaced_step_closure *dsc)
7545 if (bit (insn, 27) == 0)
7546 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7547 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7548 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7550 case 0x0: case 0x2:
7551 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7553 case 0x1: case 0x3:
7554 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7556 case 0x4: case 0x5: case 0x6: case 0x7:
7557 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7559 case 0x8:
7560 switch ((insn & 0xe00000) >> 21)
7562 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7563 /* stc/stc2. */
7564 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7566 case 0x2:
7567 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7569 default:
7570 return arm_copy_undef (gdbarch, insn, dsc);
7573 case 0x9:
7575 int rn_f = (bits (insn, 16, 19) == 0xf);
7576 switch ((insn & 0xe00000) >> 21)
7578 case 0x1: case 0x3:
7579 /* ldc/ldc2 imm (undefined for rn == pc). */
7580 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7581 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7583 case 0x2:
7584 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7586 case 0x4: case 0x5: case 0x6: case 0x7:
7587 /* ldc/ldc2 lit (undefined for rn != pc). */
7588 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7589 : arm_copy_undef (gdbarch, insn, dsc);
7591 default:
7592 return arm_copy_undef (gdbarch, insn, dsc);
7596 case 0xa:
7597 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7599 case 0xb:
7600 if (bits (insn, 16, 19) == 0xf)
7601 /* ldc/ldc2 lit. */
7602 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7603 else
7604 return arm_copy_undef (gdbarch, insn, dsc);
7606 case 0xc:
7607 if (bit (insn, 4))
7608 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7609 else
7610 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7612 case 0xd:
7613 if (bit (insn, 4))
7614 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7615 else
7616 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7618 default:
7619 return arm_copy_undef (gdbarch, insn, dsc);
7623 /* Decode miscellaneous instructions in dp/misc encoding space. */
7625 static int
7626 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7627 struct regcache *regs,
7628 struct displaced_step_closure *dsc)
7630 unsigned int op2 = bits (insn, 4, 6);
7631 unsigned int op = bits (insn, 21, 22);
7632 unsigned int op1 = bits (insn, 16, 19);
7634 switch (op2)
7636 case 0x0:
7637 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7639 case 0x1:
7640 if (op == 0x1) /* bx. */
7641 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7642 else if (op == 0x3)
7643 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7644 else
7645 return arm_copy_undef (gdbarch, insn, dsc);
7647 case 0x2:
7648 if (op == 0x1)
7649 /* Not really supported. */
7650 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7651 else
7652 return arm_copy_undef (gdbarch, insn, dsc);
7654 case 0x3:
7655 if (op == 0x1)
7656 return arm_copy_bx_blx_reg (gdbarch, insn,
7657 regs, dsc); /* blx register. */
7658 else
7659 return arm_copy_undef (gdbarch, insn, dsc);
7661 case 0x5:
7662 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7664 case 0x7:
7665 if (op == 0x1)
7666 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7667 else if (op == 0x3)
7668 /* Not really supported. */
7669 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7671 default:
7672 return arm_copy_undef (gdbarch, insn, dsc);
7676 static int
7677 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7678 struct regcache *regs,
7679 struct displaced_step_closure *dsc)
7681 if (bit (insn, 25))
7682 switch (bits (insn, 20, 24))
7684 case 0x10:
7685 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7687 case 0x14:
7688 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7690 case 0x12: case 0x16:
7691 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7693 default:
7694 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7696 else
7698 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7700 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7701 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7702 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7703 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7704 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7705 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7706 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7707 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7708 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7709 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7710 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7711 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7712 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7713 /* 2nd arg means "unprivileged". */
7714 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7715 dsc);
7718 /* Should be unreachable. */
7719 return 1;
7722 static int
7723 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7724 struct regcache *regs,
7725 struct displaced_step_closure *dsc)
7727 int a = bit (insn, 25), b = bit (insn, 4);
7728 uint32_t op1 = bits (insn, 20, 24);
7729 int rn_f = bits (insn, 16, 19) == 0xf;
7731 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7732 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7733 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7734 else if ((!a && (op1 & 0x17) == 0x02)
7735 || (a && (op1 & 0x17) == 0x02 && !b))
7736 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7737 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7738 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7739 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7740 else if ((!a && (op1 & 0x17) == 0x03)
7741 || (a && (op1 & 0x17) == 0x03 && !b))
7742 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7743 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7744 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7745 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7746 else if ((!a && (op1 & 0x17) == 0x06)
7747 || (a && (op1 & 0x17) == 0x06 && !b))
7748 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7749 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7750 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7751 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7752 else if ((!a && (op1 & 0x17) == 0x07)
7753 || (a && (op1 & 0x17) == 0x07 && !b))
7754 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7756 /* Should be unreachable. */
7757 return 1;
7760 static int
7761 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7762 struct displaced_step_closure *dsc)
7764 switch (bits (insn, 20, 24))
7766 case 0x00: case 0x01: case 0x02: case 0x03:
7767 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7769 case 0x04: case 0x05: case 0x06: case 0x07:
7770 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7772 case 0x08: case 0x09: case 0x0a: case 0x0b:
7773 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7774 return arm_copy_unmodified (gdbarch, insn,
7775 "decode/pack/unpack/saturate/reverse", dsc);
7777 case 0x18:
7778 if (bits (insn, 5, 7) == 0) /* op2. */
7780 if (bits (insn, 12, 15) == 0xf)
7781 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7782 else
7783 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7785 else
7786 return arm_copy_undef (gdbarch, insn, dsc);
7788 case 0x1a: case 0x1b:
7789 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7790 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7791 else
7792 return arm_copy_undef (gdbarch, insn, dsc);
7794 case 0x1c: case 0x1d:
7795 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7797 if (bits (insn, 0, 3) == 0xf)
7798 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7799 else
7800 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7802 else
7803 return arm_copy_undef (gdbarch, insn, dsc);
7805 case 0x1e: case 0x1f:
7806 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7807 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7808 else
7809 return arm_copy_undef (gdbarch, insn, dsc);
7812 /* Should be unreachable. */
7813 return 1;
7816 static int
7817 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7818 struct regcache *regs,
7819 struct displaced_step_closure *dsc)
7821 if (bit (insn, 25))
7822 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7823 else
7824 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7827 static int
7828 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7829 struct regcache *regs,
7830 struct displaced_step_closure *dsc)
7832 unsigned int opcode = bits (insn, 20, 24);
7834 switch (opcode)
7836 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7837 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7839 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7840 case 0x12: case 0x16:
7841 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7843 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7844 case 0x13: case 0x17:
7845 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7847 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7848 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7849 /* Note: no writeback for these instructions. Bit 25 will always be
7850 zero though (via caller), so the following works OK. */
7851 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7854 /* Should be unreachable. */
7855 return 1;
7858 /* Decode shifted register instructions. */
7860 static int
7861 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7862 uint16_t insn2, struct regcache *regs,
7863 struct displaced_step_closure *dsc)
7865 /* PC is only allowed to be used in the MOV instruction. */
7867 unsigned int op = bits (insn1, 5, 8);
7868 unsigned int rn = bits (insn1, 0, 3);
7870 if (op == 0x2 && rn == 0xf) /* MOV */
7871 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7872 else
7873 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7874 "dp (shift reg)", dsc);
7878 /* Decode extension register load/store. Exactly the same as
7879 arm_decode_ext_reg_ld_st. */
7881 static int
7882 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7883 uint16_t insn2, struct regcache *regs,
7884 struct displaced_step_closure *dsc)
7886 unsigned int opcode = bits (insn1, 4, 8);
7888 switch (opcode)
7890 case 0x04: case 0x05:
7891 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7892 "vfp/neon vmov", dsc);
7894 case 0x08: case 0x0c: /* 01x00 */
7895 case 0x0a: case 0x0e: /* 01x10 */
7896 case 0x12: case 0x16: /* 10x10 */
7897 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7898 "vfp/neon vstm/vpush", dsc);
7900 case 0x09: case 0x0d: /* 01x01 */
7901 case 0x0b: case 0x0f: /* 01x11 */
7902 case 0x13: case 0x17: /* 10x11 */
7903 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7904 "vfp/neon vldm/vpop", dsc);
7906 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7907 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7908 "vstr", dsc);
7909 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7910 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7913 /* Should be unreachable. */
7914 return 1;
7917 static int
7918 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7919 struct regcache *regs, struct displaced_step_closure *dsc)
7921 unsigned int op1 = bits (insn, 20, 25);
7922 int op = bit (insn, 4);
7923 unsigned int coproc = bits (insn, 8, 11);
7924 unsigned int rn = bits (insn, 16, 19);
7926 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7927 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7928 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7929 && (coproc & 0xe) != 0xa)
7930 /* stc/stc2. */
7931 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7932 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7933 && (coproc & 0xe) != 0xa)
7934 /* ldc/ldc2 imm/lit. */
7935 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7936 else if ((op1 & 0x3e) == 0x00)
7937 return arm_copy_undef (gdbarch, insn, dsc);
7938 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7939 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7940 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7941 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7942 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7943 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7944 else if ((op1 & 0x30) == 0x20 && !op)
7946 if ((coproc & 0xe) == 0xa)
7947 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7948 else
7949 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7951 else if ((op1 & 0x30) == 0x20 && op)
7952 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7953 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7954 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7955 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7956 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7957 else if ((op1 & 0x30) == 0x30)
7958 return arm_copy_svc (gdbarch, insn, regs, dsc);
7959 else
7960 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
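/* A quick sanity check of the dispatch above (hand-decoded example):
   "svc #0" encodes as 0xef000000, so op1 = bits 20-25 = 0x30 and only
   the final (op1 & 0x30) == 0x30 test matches, routing the instruction
   to arm_copy_svc.  */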
7963 static int
7964 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7965 uint16_t insn2, struct regcache *regs,
7966 struct displaced_step_closure *dsc)
7968 unsigned int coproc = bits (insn2, 8, 11);
7969 unsigned int op1 = bits (insn1, 4, 9);
7970 unsigned int bit_5_8 = bits (insn1, 5, 8);
7971 unsigned int bit_9 = bit (insn1, 9);
7972 unsigned int bit_4 = bit (insn1, 4);
7973 unsigned int rn = bits (insn1, 0, 3);
7975 if (bit_9 == 0)
7977 if (bit_5_8 == 2)
7978 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7979 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7980 dsc);
7981 else if (bit_5_8 == 0) /* UNDEFINED. */
7982 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7983 else
7985 /* coproc is 101x. SIMD/VFP, ext registers load/store. */
7986 if ((coproc & 0xe) == 0xa)
7987 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7988 dsc);
7989 else /* coproc is not 101x. */
7991 if (bit_4 == 0) /* STC/STC2. */
7992 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7993 "stc/stc2", dsc);
7994 else /* LDC/LDC2 {literal, immediate}. */
7995 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7996 regs, dsc);
8000 else
8001 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
8003 return 0;
8006 static void
8007 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
8008 struct displaced_step_closure *dsc, int rd)
8010 /* ADR Rd, #imm
8012 Rewrite as:
8014 Preparation: Rd <- PC
8015 Insn: ADD Rd, #imm
8016 Cleanup: Null.
8019 /* Rd <- PC */
8020 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8021 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
8024 static int
8025 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
8026 struct displaced_step_closure *dsc,
8027 int rd, unsigned int imm)
8030 /* Encoding T2: ADDS Rd, #imm */
8031 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
8033 install_pc_relative (gdbarch, regs, dsc, rd);
8035 return 0;
8038 static int
8039 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
8040 struct regcache *regs,
8041 struct displaced_step_closure *dsc)
8043 unsigned int rd = bits (insn, 8, 10);
8044 unsigned int imm8 = bits (insn, 0, 7);
8046 if (debug_displaced)
8047 fprintf_unfiltered (gdb_stdlog,
8048 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8049 rd, imm8, insn);
8051 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8054 static int
8055 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
8056 uint16_t insn2, struct regcache *regs,
8057 struct displaced_step_closure *dsc)
8059 unsigned int rd = bits (insn2, 8, 11);
8060 /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
8061 extract the raw immediate fields rather than computing the immediate
8062 value. When generating the ADD or SUB instruction, we can then simply
8063 OR the immediate fields into it. */
8064 unsigned int imm_3_8 = insn2 & 0x70ff;
8065 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
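/* For illustration (immediate value invented): an ADR whose 12-bit
   immediate i:imm3:imm8 is 0x123 has i = 0, imm3 = 0x1 and imm8 = 0x23,
   so the extraction above yields imm_i = 0x0000 and imm_3_8 = 0x1023;
   ORing (rd << 8) into imm_3_8 below rebuilds the second halfword of the
   substituted ADD or SUB.  */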
8067 if (debug_displaced)
8068 fprintf_unfiltered (gdb_stdlog,
8069 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
8070 rd, imm_i, imm_3_8, insn1, insn2);
8072 if (bit (insn1, 7)) /* Encoding T2 */
8074 /* Encoding T2: SUB Rd, Rd, #imm */
8075 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
8076 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8078 else /* Encoding T3 */
8080 /* Encoding T3: ADD Rd, Rd, #imm */
8081 dsc->modinsn[0] = (0xf100 | rd | imm_i);
8082 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8084 dsc->numinsns = 2;
8086 install_pc_relative (gdbarch, regs, dsc, rd);
8088 return 0;
8091 static int
8092 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8093 struct regcache *regs,
8094 struct displaced_step_closure *dsc)
8096 unsigned int rt = bits (insn1, 8, 10);
8097 unsigned int pc;
8098 int imm8 = (bits (insn1, 0, 7) << 2);
8099 CORE_ADDR from = dsc->insn_addr;
8101 /* LDR Rd, #imm8
8103 Rewrite as:
8105 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8107 Insn: LDR R0, [R2, R3];
8108 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8110 if (debug_displaced)
8111 fprintf_unfiltered (gdb_stdlog,
8112 "displaced: copying thumb ldr r%d [pc #%d]\n"
8113 , rt, imm8);
8115 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8116 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8117 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8118 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8119 /* The assembler calculates the required value of the offset from the
8120 Align(PC,4) value of this instruction to the label. */
8121 pc = pc & 0xfffffffc;
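/* A hand-worked example (addresses invented): for "ldr r3, [pc, #8]" at
   0x8002, the PC value read above is 0x8002 + 4 = 0x8006, which
   Align(PC,4) rounds down to 0x8004, so the substituted
   "ldr r0, [r2, r3]" loads from 0x8004 + 8 = 0x800c.  */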
8123 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8124 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8126 dsc->rd = rt;
8127 dsc->u.ldst.xfersize = 4;
8128 dsc->u.ldst.rn = 0;
8129 dsc->u.ldst.immed = 0;
8130 dsc->u.ldst.writeback = 0;
8131 dsc->u.ldst.restore_r4 = 0;
8133 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8135 dsc->cleanup = &cleanup_load;
8137 return 0;
8140 /* Copy Thumb cbnz/cbz instruction. */
8142 static int
8143 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8144 struct regcache *regs,
8145 struct displaced_step_closure *dsc)
8147 int non_zero = bit (insn1, 11);
8148 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8149 CORE_ADDR from = dsc->insn_addr;
8150 int rn = bits (insn1, 0, 2);
8151 int rn_val = displaced_read_reg (regs, dsc, rn);
8153 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8154 /* CBNZ and CBZ do not affect the condition flags. If the condition is
8155 true, set it to INST_AL so cleanup_branch will know the branch is taken;
8156 otherwise leave it alone and cleanup_branch will do nothing. */
8157 if (dsc->u.branch.cond)
8159 dsc->u.branch.cond = INST_AL;
8160 dsc->u.branch.dest = from + 4 + imm5;
8162 else
8163 dsc->u.branch.dest = from + 2;
8165 dsc->u.branch.link = 0;
8166 dsc->u.branch.exchange = 0;
8168 if (debug_displaced)
8169 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
8170 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
8171 rn, rn_val, insn1, dsc->u.branch.dest);
8173 dsc->modinsn[0] = THUMB_NOP;
8175 dsc->cleanup = &cleanup_branch;
8176 return 0;
8179 /* Copy Table Branch Byte/Halfword */
8180 static int
8181 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8182 uint16_t insn2, struct regcache *regs,
8183 struct displaced_step_closure *dsc)
8185 ULONGEST rn_val, rm_val;
8186 int is_tbh = bit (insn2, 4);
8187 CORE_ADDR halfwords = 0;
8188 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8190 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8191 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8193 if (is_tbh)
8195 gdb_byte buf[2];
8197 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8198 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8200 else
8202 gdb_byte buf[1];
8204 target_read_memory (rn_val + rm_val, buf, 1);
8205 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8208 if (debug_displaced)
8209 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
8210 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
8211 (unsigned int) rn_val, (unsigned int) rm_val,
8212 (unsigned int) halfwords);
8214 dsc->u.branch.cond = INST_AL;
8215 dsc->u.branch.link = 0;
8216 dsc->u.branch.exchange = 0;
8217 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8219 dsc->cleanup = &cleanup_branch;
8221 return 0;
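/* A worked example of the branch-target arithmetic above (addresses
   invented): for a TBB at 0x8000 whose selected table byte is 0x05,
   halfwords = 5 and the destination becomes 0x8000 + 4 + 2 * 5 =
   0x800e.  */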
8224 static void
8225 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8226 struct displaced_step_closure *dsc)
8228 /* PC <- r7 */
8229 int val = displaced_read_reg (regs, dsc, 7);
8230 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8232 /* r7 <- r8 */
8233 val = displaced_read_reg (regs, dsc, 8);
8234 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8236 /* r8 <- tmp[0] */
8237 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8241 static int
8242 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8243 struct regcache *regs,
8244 struct displaced_step_closure *dsc)
8246 dsc->u.block.regmask = insn1 & 0x00ff;
8248 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8249 to :
8251 (1) register list is full, that is, r0-r7 are used.
8252 Prepare: tmp[0] <- r8
8254 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8255 MOV r8, r7; Move value of r7 to r8;
8256 POP {r7}; Store PC value into r7.
8258 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8260 (2) register list is not full, supposing there are N registers in
8261 register list (except PC, 0 <= N <= 7).
8262 Prepare: for each i, 0 - N, tmp[i] <- ri.
8264 POP {r0, r1, ...., rN};
8266 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8267 from tmp[] properly.
8269 if (debug_displaced)
8270 fprintf_unfiltered (gdb_stdlog,
8271 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8272 dsc->u.block.regmask, insn1);
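/* For example (case 2 above, register list invented): POP {r0, r2, pc}
   has regmask 0x05 plus PC, so num_in_list = 2, new_regmask = 0x07 and
   the modified instruction becomes POP {r0, r1, r2}; the cleanup then
   routes the three popped values to r0, r2 and the PC respectively, and
   restores any low registers used only as scratch from tmp[].  */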
8274 if (dsc->u.block.regmask == 0xff)
8276 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8278 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8279 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8280 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8282 dsc->numinsns = 3;
8283 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8285 else
8287 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8288 unsigned int new_regmask, bit = 1;
8289 unsigned int to = 0, from = 0, i, new_rn;
8291 for (i = 0; i < num_in_list + 1; i++)
8292 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8294 new_regmask = (1 << (num_in_list + 1)) - 1;
8296 if (debug_displaced)
8297 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8298 "{..., pc}: original reg list %.4x,"
8299 " modified list %.4x\n"),
8300 (int) dsc->u.block.regmask, new_regmask);
8302 dsc->u.block.regmask |= 0x8000;
8303 dsc->u.block.writeback = 0;
8304 dsc->u.block.cond = INST_AL;
8306 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8308 dsc->cleanup = &cleanup_block_load_pc;
8311 return 0;
8314 static void
8315 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8316 struct regcache *regs,
8317 struct displaced_step_closure *dsc)
8319 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8320 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8321 int err = 0;
8323 /* 16-bit thumb instructions. */
8324 switch (op_bit_12_15)
8326 /* Shift (immediate), add, subtract, move and compare. */
8327 case 0: case 1: case 2: case 3:
8328 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8329 "shift/add/sub/mov/cmp",
8330 dsc);
8331 break;
8332 case 4:
8333 switch (op_bit_10_11)
8335 case 0: /* Data-processing */
8336 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8337 "data-processing",
8338 dsc);
8339 break;
8340 case 1: /* Special data instructions and branch and exchange. */
8342 unsigned short op = bits (insn1, 7, 9);
8343 if (op == 6 || op == 7) /* BX or BLX */
8344 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8345 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8346 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8347 else
8348 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8349 dsc);
8351 break;
8352 default: /* LDR (literal) */
8353 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8355 break;
8356 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8357 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8358 break;
8359 case 10:
8360 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8361 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8362 else /* Generate SP-relative address */
8363 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8364 break;
8365 case 11: /* Misc 16-bit instructions */
8367 switch (bits (insn1, 8, 11))
8369 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8370 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8371 break;
8372 case 12: case 13: /* POP */
8373 if (bit (insn1, 8)) /* PC is in register list. */
8374 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8375 else
8376 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8377 break;
8378 case 15: /* If-Then, and hints */
8379 if (bits (insn1, 0, 3))
8380 /* If-Then makes up to four following instructions conditional.
8381 The IT instruction itself is not conditional, so handle it as
8382 an ordinary unmodified instruction. */
8383 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8384 dsc);
8385 else
8386 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8387 break;
8388 default:
8389 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8392 break;
8393 case 12:
8394 if (op_bit_10_11 < 2) /* Store multiple registers */
8395 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8396 else /* Load multiple registers */
8397 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8398 break;
8399 case 13: /* Conditional branch and supervisor call */
8400 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8401 err = thumb_copy_b (gdbarch, insn1, dsc);
8402 else
8403 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8404 break;
8405 case 14: /* Unconditional branch */
8406 err = thumb_copy_b (gdbarch, insn1, dsc);
8407 break;
8408 default:
8409 err = 1;
8412 if (err)
8413 internal_error (__FILE__, __LINE__,
8414 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8417 static int
8418 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8419 uint16_t insn1, uint16_t insn2,
8420 struct regcache *regs,
8421 struct displaced_step_closure *dsc)
8423 int rt = bits (insn2, 12, 15);
8424 int rn = bits (insn1, 0, 3);
8425 int op1 = bits (insn1, 7, 8);
8426 int err = 0;
8428 switch (bits (insn1, 5, 6))
8430 case 0: /* Load byte and memory hints */
8431 if (rt == 0xf) /* PLD/PLI */
8433 if (rn == 0xf)
8434 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8435 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8436 else
8437 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8438 "pli/pld", dsc);
8440 else
8442 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8443 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8445 else
8446 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8447 "ldrb{reg, immediate}/ldrbt",
8448 dsc);
8451 break;
8452 case 1: /* Load halfword and memory hints. */
8453 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8454 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8455 "pld/unalloc memhint", dsc);
8456 else
8458 if (rn == 0xf)
8459 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8461 else
8462 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8463 "ldrh/ldrht", dsc);
8465 break;
8466 case 2: /* Load word */
8468 int insn2_bit_8_11 = bits (insn2, 8, 11);
8470 if (rn == 0xf)
8471 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8472 else if (op1 == 0x1) /* Encoding T3 */
8473 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8474 0, 1);
8475 else /* op1 == 0x0 */
8477 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8478 /* LDR (immediate) */
8479 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8480 dsc, bit (insn2, 8), 1);
8481 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8482 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8483 "ldrt", dsc);
8484 else
8485 /* LDR (register) */
8486 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8487 dsc, 0, 0);
8489 break;
8491 default:
8492 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8493 break;
8495 return 0;
8498 static void
8499 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8500 uint16_t insn2, struct regcache *regs,
8501 struct displaced_step_closure *dsc)
8503 int err = 0;
8504 unsigned short op = bit (insn2, 15);
8505 unsigned int op1 = bits (insn1, 11, 12);
8507 switch (op1)
8509 case 1:
8511 switch (bits (insn1, 9, 10))
8513 case 0:
8514 if (bit (insn1, 6))
8516 /* Load/store {dual, exclusive}, table branch. */
8517 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8518 && bits (insn2, 5, 7) == 0)
8519 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8520 dsc);
8521 else
8522 /* PC is not allowed to be used in load/store {dual, exclusive}
8523 instructions. */
8524 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8525 "load/store dual/ex", dsc);
8527 else /* load/store multiple */
8529 switch (bits (insn1, 7, 8))
8531 case 0: case 3: /* SRS, RFE */
8532 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8533 "srs/rfe", dsc);
8534 break;
8535 case 1: case 2: /* LDM/STM/PUSH/POP */
8536 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8537 break;
8540 break;
8542 case 1:
8543 /* Data-processing (shifted register). */
8544 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8545 dsc);
8546 break;
8547 default: /* Coprocessor instructions. */
8548 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8549 break;
8551 break;
8553 case 2: /* op1 = 2 */
8554 if (op) /* Branch and misc control. */
8556 if (bit (insn2, 14) /* BLX/BL */
8557 || bit (insn2, 12) /* Unconditional branch */
8558 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8559 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8560 else
8561 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8562 "misc ctrl", dsc);
8564 else
8566 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8568 int op = bits (insn1, 4, 8);
8569 int rn = bits (insn1, 0, 3);
8570 if ((op == 0 || op == 0xa) && rn == 0xf)
8571 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8572 regs, dsc);
8573 else
8574 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8575 "dp/pb", dsc);
8577 else /* Data processing (modified immediate) */
8578 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8579 "dp/mi", dsc);
8581 break;
8582 case 3: /* op1 = 3 */
8583 switch (bits (insn1, 9, 10))
8585 case 0:
8586 if (bit (insn1, 4))
8587 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8588 regs, dsc);
8589 else /* NEON Load/Store and Store single data item */
8590 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8591 "neon elt/struct load/store",
8592 dsc);
8593 break;
8594 case 1: /* op1 = 3, bits (9, 10) == 1 */
8595 switch (bits (insn1, 7, 8))
8597 case 0: case 1: /* Data processing (register) */
8598 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8599 "dp(reg)", dsc);
8600 break;
8601 case 2: /* Multiply and absolute difference */
8602 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8603 "mul/mua/diff", dsc);
8604 break;
8605 case 3: /* Long multiply and divide */
8606 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8607 "lmul/lmua", dsc);
8608 break;
8610 break;
8611 default: /* Coprocessor instructions */
8612 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8613 break;
8615 break;
8616 default:
8617 err = 1;
8620 if (err)
8621 internal_error (__FILE__, __LINE__,
8622 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8626 static void
8627 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8628 CORE_ADDR to, struct regcache *regs,
8629 struct displaced_step_closure *dsc)
8631 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8632 uint16_t insn1
8633 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8635 if (debug_displaced)
8636 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8637 "at %.8lx\n", insn1, (unsigned long) from);
8639 dsc->is_thumb = 1;
8640 dsc->insn_size = thumb_insn_size (insn1);
8641 if (thumb_insn_size (insn1) == 4)
8643 uint16_t insn2
8644 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8645 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8647 else
8648 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8651 void
8652 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8653 CORE_ADDR to, struct regcache *regs,
8654 struct displaced_step_closure *dsc)
8656 int err = 0;
8657 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8658 uint32_t insn;
8660 /* Most displaced instructions use a 1-instruction scratch space, so set this
8661 here and override below if/when necessary. */
8662 dsc->numinsns = 1;
8663 dsc->insn_addr = from;
8664 dsc->scratch_base = to;
8665 dsc->cleanup = NULL;
8666 dsc->wrote_to_pc = 0;
8668 if (!displaced_in_arm_mode (regs))
8669 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8671 dsc->is_thumb = 0;
8672 dsc->insn_size = 4;
8673 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8674 if (debug_displaced)
8675 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8676 "at %.8lx\n", (unsigned long) insn,
8677 (unsigned long) from);
8679 if ((insn & 0xf0000000) == 0xf0000000)
8680 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8681 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8683 case 0x0: case 0x1: case 0x2: case 0x3:
8684 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8685 break;
8687 case 0x4: case 0x5: case 0x6:
8688 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8689 break;
8691 case 0x7:
8692 err = arm_decode_media (gdbarch, insn, dsc);
8693 break;
8695 case 0x8: case 0x9: case 0xa: case 0xb:
8696 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8697 break;
8699 case 0xc: case 0xd: case 0xe: case 0xf:
8700 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8701 break;
8704 if (err)
8705 internal_error (__FILE__, __LINE__,
8706 _("arm_process_displaced_insn: Instruction decode error"));
8709 /* Actually set up the scratch space for a displaced instruction. */
8711 void
8712 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8713 CORE_ADDR to, struct displaced_step_closure *dsc)
8715 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8716 unsigned int i, len, offset;
8717 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8718 int size = dsc->is_thumb? 2 : 4;
8719 const gdb_byte *bkp_insn;
8721 offset = 0;
8722 /* Poke modified instruction(s). */
8723 for (i = 0; i < dsc->numinsns; i++)
8725 if (debug_displaced)
8727 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8728 if (size == 4)
8729 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8730 dsc->modinsn[i]);
8731 else if (size == 2)
8732 fprintf_unfiltered (gdb_stdlog, "%.4x",
8733 (unsigned short)dsc->modinsn[i]);
8735 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8736 (unsigned long) to + offset);
8739 write_memory_unsigned_integer (to + offset, size,
8740 byte_order_for_code,
8741 dsc->modinsn[i]);
8742 offset += size;
8745 /* Choose the correct breakpoint instruction. */
8746 if (dsc->is_thumb)
8748 bkp_insn = tdep->thumb_breakpoint;
8749 len = tdep->thumb_breakpoint_size;
8751 else
8753 bkp_insn = tdep->arm_breakpoint;
8754 len = tdep->arm_breakpoint_size;
8757 /* Put breakpoint afterwards. */
8758 write_memory (to + offset, bkp_insn, len);
8760 if (debug_displaced)
8761 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8762 paddress (gdbarch, from), paddress (gdbarch, to));
8765 /* Entry point for copying an instruction into scratch space for displaced
8766 stepping. */
8768 struct displaced_step_closure *
8769 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8770 CORE_ADDR from, CORE_ADDR to,
8771 struct regcache *regs)
8773 struct displaced_step_closure *dsc = XNEW (struct displaced_step_closure);
8775 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8776 arm_displaced_init_closure (gdbarch, from, to, dsc);
8778 return dsc;
8781 /* Entry point for cleaning things up after a displaced instruction has been
8782 single-stepped. */
8784 void
8785 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8786 struct displaced_step_closure *dsc,
8787 CORE_ADDR from, CORE_ADDR to,
8788 struct regcache *regs)
8790 if (dsc->cleanup)
8791 dsc->cleanup (gdbarch, regs, dsc);
8793 if (!dsc->wrote_to_pc)
8794 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8795 dsc->insn_addr + dsc->insn_size);
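/* Putting the entry points together, the expected calling sequence is
   roughly (a sketch of how the gdbarch hooks are used, not another entry
   point):

     dsc = arm_displaced_step_copy_insn (gdbarch, from, to, regs);
     ... the target single-steps the copy in the scratch space at TO ...
     arm_displaced_step_fixup (gdbarch, dsc, from, to, regs);

   The fixup runs the closure's cleanup (if any) and, if the copied
   instruction never wrote the PC itself, advances the PC past the
   original instruction at FROM.  */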
8799 #include "bfd-in2.h"
8800 #include "libcoff.h"
8802 static int
8803 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8805 struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;
8807 if (arm_pc_is_thumb (gdbarch, memaddr))
8809 static asymbol *asym;
8810 static combined_entry_type ce;
8811 static struct coff_symbol_struct csym;
8812 static struct bfd fake_bfd;
8813 static bfd_target fake_target;
8815 if (csym.native == NULL)
8817 /* Create a fake symbol vector containing a Thumb symbol.
8818 This is solely so that the code in print_insn_little_arm()
8819 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8820 the presence of a Thumb symbol and switch to decoding
8821 Thumb instructions. */
8823 fake_target.flavour = bfd_target_coff_flavour;
8824 fake_bfd.xvec = &fake_target;
8825 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8826 csym.native = &ce;
8827 csym.symbol.the_bfd = &fake_bfd;
8828 csym.symbol.name = "fake";
8829 asym = (asymbol *) & csym;
8832 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8833 info->symbols = &asym;
8835 else
8836 info->symbols = NULL;
8838 if (info->endian == BFD_ENDIAN_BIG)
8839 return print_insn_big_arm (memaddr, info);
8840 else
8841 return print_insn_little_arm (memaddr, info);
8844 /* The following define instruction sequences that will cause ARM
8845 cpu's to take an undefined instruction trap. These are used to
8846 signal a breakpoint to GDB.
8848 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8849 modes. A different instruction is required for each mode. The ARM
8850 cpu's can also be big or little endian. Thus four different
8851 instructions are needed to support all cases.
8853 Note: ARMv4 defines several new instructions that will take the
8854 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8855 not in fact add the new instructions. The new undefined
8856 instructions in ARMv4 are all instructions that had no defined
8857 behaviour in earlier chips. There is no guarantee that they will
8858 raise an exception, but may be treated as NOP's. In practice, it
8859 may only be safe to rely on instructions matching:
8861 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8862 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8863 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8865 Even this may only be true if the condition predicate is true. The
8866 following use a condition predicate of ALWAYS so it is always TRUE.
8868 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8869 and NetBSD all use a software interrupt rather than an undefined
8870 instruction to force a trap. This can be handled by the
8871 abi-specific code during establishment of the gdbarch vector. */
8873 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8874 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8875 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8876 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
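/* As a worked check of the pattern above: ARM_LE_BREAKPOINT is the
   little-endian byte order of the word 0xe7ffdefe, whose cond field is
   0xe (ALWAYS), whose bits 27-25 are 011 and whose bit 4 is 1, so it
   falls inside the encoding range described above as safe to rely on.  */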
8878 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8879 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8880 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8881 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8883 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8884 the program counter value to determine whether a 16-bit or 32-bit
8885 breakpoint should be used. It returns a pointer to a string of
8886 bytes that encode a breakpoint instruction, stores the length of
8887 the string to *lenptr, and adjusts the program counter (if
8888 necessary) to point to the actual memory location where the
8889 breakpoint should be inserted. */
8891 static const unsigned char *
8892 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8894 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8895 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8897 if (arm_pc_is_thumb (gdbarch, *pcptr))
8899 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8901 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8902 check whether we are replacing a 32-bit instruction. */
8903 if (tdep->thumb2_breakpoint != NULL)
8905 gdb_byte buf[2];
8906 if (target_read_memory (*pcptr, buf, 2) == 0)
8908 unsigned short inst1;
8909 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8910 if (thumb_insn_size (inst1) == 4)
8912 *lenptr = tdep->thumb2_breakpoint_size;
8913 return tdep->thumb2_breakpoint;
8918 *lenptr = tdep->thumb_breakpoint_size;
8919 return tdep->thumb_breakpoint;
8921 else
8923 *lenptr = tdep->arm_breakpoint_size;
8924 return tdep->arm_breakpoint;
8928 static void
8929 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8930 int *kindptr)
8932 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8934 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8935 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8936 that this is not confused with a 32-bit ARM breakpoint. */
8937 *kindptr = 3;
8940 /* Extract from an array REGBUF containing the (raw) register state a
8941 function return value of type TYPE, and copy that, in virtual
8942 format, into VALBUF. */
8944 static void
8945 arm_extract_return_value (struct type *type, struct regcache *regs,
8946 gdb_byte *valbuf)
8948 struct gdbarch *gdbarch = get_regcache_arch (regs);
8949 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8951 if (TYPE_CODE_FLT == TYPE_CODE (type))
8953 switch (gdbarch_tdep (gdbarch)->fp_model)
8955 case ARM_FLOAT_FPA:
8957 /* The value is in register F0 in internal format. We need to
8958 extract the raw value and then convert it to the desired
8959 internal type. */
8960 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8962 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8963 convert_from_extended (floatformat_from_type (type), tmpbuf,
8964 valbuf, gdbarch_byte_order (gdbarch));
8966 break;
8968 case ARM_FLOAT_SOFT_FPA:
8969 case ARM_FLOAT_SOFT_VFP:
8970 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8971 not using the VFP ABI code. */
8972 case ARM_FLOAT_VFP:
8973 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8974 if (TYPE_LENGTH (type) > 4)
8975 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8976 valbuf + INT_REGISTER_SIZE);
8977 break;
8979 default:
8980 internal_error (__FILE__, __LINE__,
8981 _("arm_extract_return_value: "
8982 "Floating point model not supported"));
8983 break;
8986 else if (TYPE_CODE (type) == TYPE_CODE_INT
8987 || TYPE_CODE (type) == TYPE_CODE_CHAR
8988 || TYPE_CODE (type) == TYPE_CODE_BOOL
8989 || TYPE_CODE (type) == TYPE_CODE_PTR
8990 || TYPE_CODE (type) == TYPE_CODE_REF
8991 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8993 /* If the type is a plain integer, then the access is
8994 straight-forward. Otherwise we have to play around a bit
8995 more. */
8996 int len = TYPE_LENGTH (type);
8997 int regno = ARM_A1_REGNUM;
8998 ULONGEST tmp;
9000 while (len > 0)
9002 /* By using store_unsigned_integer we avoid having to do
9003 anything special for small big-endian values. */
9004 regcache_cooked_read_unsigned (regs, regno++, &tmp);
9005 store_unsigned_integer (valbuf,
9006 (len > INT_REGISTER_SIZE
9007 ? INT_REGISTER_SIZE : len),
9008 byte_order, tmp);
9009 len -= INT_REGISTER_SIZE;
9010 valbuf += INT_REGISTER_SIZE;
9013 else
9015 /* For a structure or union the behaviour is as if the value had
9016 been stored to word-aligned memory and then loaded into
9017 registers with 32-bit load instruction(s). */
9018 int len = TYPE_LENGTH (type);
9019 int regno = ARM_A1_REGNUM;
9020 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9022 while (len > 0)
9024 regcache_cooked_read (regs, regno++, tmpbuf);
9025 memcpy (valbuf, tmpbuf,
9026 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9027 len -= INT_REGISTER_SIZE;
9028 valbuf += INT_REGISTER_SIZE;
9034 /* Will a function return an aggregate type in memory or in a
9035 register? Return 0 if an aggregate type can be returned in a
9036 register, 1 if it must be returned in memory. */
9038 static int
9039 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
9041 enum type_code code;
9043 type = check_typedef (type);
9045 /* Simple, non-aggregate types (ie not including vectors and
9046 complex) are always returned in a register (or registers). */
9047 code = TYPE_CODE (type);
9048 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
9049 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
9050 return 0;
9052 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
9054 /* Vector values should be returned using ARM registers if they
9055 are not over 16 bytes. */
9056 return (TYPE_LENGTH (type) > 16);
9059 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
9061 /* The AAPCS says all aggregates not larger than a word are returned
9062 in a register. */
9063 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
9064 return 0;
9066 return 1;
9068 else
9070 int nRc;
9072 /* All aggregate types that won't fit in a register must be returned
9073 in memory. */
9074 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
9075 return 1;
9077 /* In the ARM ABI, "integer" like aggregate types are returned in
9078 registers. For an aggregate type to be integer like, its size
9079 must be less than or equal to INT_REGISTER_SIZE and the
9080 offset of each addressable subfield must be zero. Note that bit
9081 fields are not addressable, and all addressable subfields of
9082 unions always start at offset zero.
9084 This function is based on the behaviour of GCC 2.95.1.
9085 See: gcc/arm.c: arm_return_in_memory() for details.
9087 Note: All versions of GCC before GCC 2.95.2 do not set up the
9088 parameters correctly for a function returning the following
9089 structure: struct { float f;}; This should be returned in memory,
9090 not a register. Richard Earnshaw sent me a patch, but I do not
9091 know of any way to detect if a function like the above has been
9092 compiled with the correct calling convention. */
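/* Two illustrative cases of the check below (types invented for the
   example): under the APCS, "struct { int i; }" has no floating point
   field and its only subfield is at offset zero, so nRc stays 0 and the
   value comes back in r0; "struct { float f; }" trips the TYPE_CODE_FLT
   test, nRc becomes 1 and the value is returned in memory.  */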
9094 /* Assume all other aggregate types can be returned in a register.
9095 Run a check for structures, unions and arrays. */
9096 nRc = 0;
9098 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9100 int i;
9101 /* Need to check if this struct/union is "integer" like. For
9102 this to be true, its size must be less than or equal to
9103 INT_REGISTER_SIZE and the offset of each addressable
9104 subfield must be zero. Note that bit fields are not
9105 addressable, and unions always start at offset zero. If any
9106 of the subfields is a floating point type, the struct/union
9107 cannot be an integer type. */
9109 /* For each field in the object, check:
9110 1) Is it FP? --> yes, nRc = 1;
9111 2) Is it addressable (bitpos != 0) and
9112 not packed (bitsize == 0)?
9113 --> yes, nRc = 1
9116 for (i = 0; i < TYPE_NFIELDS (type); i++)
9118 enum type_code field_type_code;
9120 field_type_code
9121 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
9122 i)));
9124 /* Is it a floating point type field? */
9125 if (field_type_code == TYPE_CODE_FLT)
9127 nRc = 1;
9128 break;
9131 /* If bitpos != 0, then we have to care about it. */
9132 if (TYPE_FIELD_BITPOS (type, i) != 0)
9134 /* Bitfields are not addressable. If the field bitsize is
9135 zero, then the field is not packed. Hence it cannot be
9136 a bitfield or any other packed type. */
9137 if (TYPE_FIELD_BITSIZE (type, i) == 0)
9139 nRc = 1;
9140 break;
9146 return nRc;
9150 /* Write into appropriate registers a function return value of type
9151 TYPE, given in virtual format. */
9153 static void
9154 arm_store_return_value (struct type *type, struct regcache *regs,
9155 const gdb_byte *valbuf)
9157 struct gdbarch *gdbarch = get_regcache_arch (regs);
9158 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9160 if (TYPE_CODE (type) == TYPE_CODE_FLT)
9162 gdb_byte buf[MAX_REGISTER_SIZE];
9164 switch (gdbarch_tdep (gdbarch)->fp_model)
9166 case ARM_FLOAT_FPA:
9168 convert_to_extended (floatformat_from_type (type), buf, valbuf,
9169 gdbarch_byte_order (gdbarch));
9170 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
9171 break;
9173 case ARM_FLOAT_SOFT_FPA:
9174 case ARM_FLOAT_SOFT_VFP:
9175 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9176 not using the VFP ABI code. */
9177 case ARM_FLOAT_VFP:
9178 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
9179 if (TYPE_LENGTH (type) > 4)
9180 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
9181 valbuf + INT_REGISTER_SIZE);
9182 break;
9184 default:
9185 internal_error (__FILE__, __LINE__,
9186 _("arm_store_return_value: Floating "
9187 "point model not supported"));
9188 break;
9191 else if (TYPE_CODE (type) == TYPE_CODE_INT
9192 || TYPE_CODE (type) == TYPE_CODE_CHAR
9193 || TYPE_CODE (type) == TYPE_CODE_BOOL
9194 || TYPE_CODE (type) == TYPE_CODE_PTR
9195 || TYPE_CODE (type) == TYPE_CODE_REF
9196 || TYPE_CODE (type) == TYPE_CODE_ENUM)
9198 if (TYPE_LENGTH (type) <= 4)
9200 /* Values of one word or less are zero/sign-extended and
9201 returned in r0. */
9202 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9203 LONGEST val = unpack_long (type, valbuf);
9205 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
9206 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
9208 else
9210 /* Integral values greater than one word are stored in consecutive
9211 registers starting with r0. This will always be a multiple of
9212 the register size. */
9213 int len = TYPE_LENGTH (type);
9214 int regno = ARM_A1_REGNUM;
9216 while (len > 0)
9218 regcache_cooked_write (regs, regno++, valbuf);
9219 len -= INT_REGISTER_SIZE;
9220 valbuf += INT_REGISTER_SIZE;
9224 else
9226 /* For a structure or union the behaviour is as if the value had
9227 been stored to word-aligned memory and then loaded into
9228 registers with 32-bit load instruction(s). */
9229 int len = TYPE_LENGTH (type);
9230 int regno = ARM_A1_REGNUM;
9231 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9233 while (len > 0)
9235 memcpy (tmpbuf, valbuf,
9236 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9237 regcache_cooked_write (regs, regno++, tmpbuf);
9238 len -= INT_REGISTER_SIZE;
9239 valbuf += INT_REGISTER_SIZE;
9245 /* Handle function return values. */
9247 static enum return_value_convention
9248 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9249 struct type *valtype, struct regcache *regcache,
9250 gdb_byte *readbuf, const gdb_byte *writebuf)
9252 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9253 struct type *func_type = function ? value_type (function) : NULL;
9254 enum arm_vfp_cprc_base_type vfp_base_type;
9255 int vfp_base_count;
9257 if (arm_vfp_abi_for_function (gdbarch, func_type)
9258 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9260 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9261 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9262 int i;
9263 for (i = 0; i < vfp_base_count; i++)
9265 if (reg_char == 'q')
9267 if (writebuf)
9268 arm_neon_quad_write (gdbarch, regcache, i,
9269 writebuf + i * unit_length);
9271 if (readbuf)
9272 arm_neon_quad_read (gdbarch, regcache, i,
9273 readbuf + i * unit_length);
9275 else
9277 char name_buf[4];
9278 int regnum;
9280 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9281 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9282 strlen (name_buf));
9283 if (writebuf)
9284 regcache_cooked_write (regcache, regnum,
9285 writebuf + i * unit_length);
9286 if (readbuf)
9287 regcache_cooked_read (regcache, regnum,
9288 readbuf + i * unit_length);
9291 return RETURN_VALUE_REGISTER_CONVENTION;
9294 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9295 || TYPE_CODE (valtype) == TYPE_CODE_UNION
9296 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9298 if (tdep->struct_return == pcc_struct_return
9299 || arm_return_in_memory (gdbarch, valtype))
9300 return RETURN_VALUE_STRUCT_CONVENTION;
9302 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
9304 if (arm_return_in_memory (gdbarch, valtype))
9305 return RETURN_VALUE_STRUCT_CONVENTION;
9308 if (writebuf)
9309 arm_store_return_value (valtype, regcache, writebuf);
9311 if (readbuf)
9312 arm_extract_return_value (valtype, regcache, readbuf);
9314 return RETURN_VALUE_REGISTER_CONVENTION;
9318 static int
9319 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9321 struct gdbarch *gdbarch = get_frame_arch (frame);
9322 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9323 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9324 CORE_ADDR jb_addr;
9325 gdb_byte buf[INT_REGISTER_SIZE];
9327 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9329 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9330 INT_REGISTER_SIZE))
9331 return 0;
9333 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9334 return 1;
9337 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9338 return the target PC. Otherwise return 0. */
9340 CORE_ADDR
9341 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9343 const char *name;
9344 int namelen;
9345 CORE_ADDR start_addr;
9347 /* Find the starting address and name of the function containing the PC. */
9348 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9350 /* Trampoline 'bx reg' doesn't belong to any function. Do the
9351 check here. */
9352 start_addr = arm_skip_bx_reg (frame, pc);
9353 if (start_addr != 0)
9354 return start_addr;
9356 return 0;
9359 /* If PC is in a Thumb call or return stub, return the address of the
9360 target PC, which is in a register. The thunk functions are called
9361 _call_via_xx, where xx is the register name. The possible names
9362 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9363 functions, named __ARM_call_via_r[0-7]. */
9364 if (startswith (name, "_call_via_")
9365 || startswith (name, "__ARM_call_via_"))
9367 /* Use the name suffix to determine which register contains the
9368 target PC. */
9369 static char *table[15] =
9370 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9371 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9373 int regno;
9374 int offset = strlen (name) - 2;
9376 for (regno = 0; regno <= 14; regno++)
9377 if (strcmp (&name[offset], table[regno]) == 0)
9378 return get_frame_register_unsigned (frame, regno);
9381 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9382 non-interworking calls to foo. We could decode the stubs
9383 to find the target but it's easier to use the symbol table. */
9384 namelen = strlen (name);
9385 if (name[0] == '_' && name[1] == '_'
9386 && ((namelen > 2 + strlen ("_from_thumb")
9387 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
9388 || (namelen > 2 + strlen ("_from_arm")
9389 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
9391 char *target_name;
9392 int target_len = namelen - 2;
9393 struct bound_minimal_symbol minsym;
9394 struct objfile *objfile;
9395 struct obj_section *sec;
9397 if (name[namelen - 1] == 'b')
9398 target_len -= strlen ("_from_thumb");
9399 else
9400 target_len -= strlen ("_from_arm");
9402 target_name = (char *) alloca (target_len + 1);
9403 memcpy (target_name, name + 2, target_len);
9404 target_name[target_len] = '\0';
9406 sec = find_pc_section (pc);
9407 objfile = (sec == NULL) ? NULL : sec->objfile;
9408 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9409 if (minsym.minsym != NULL)
9410 return BMSYMBOL_VALUE_ADDRESS (minsym);
9411 else
9412 return 0;
9415 return 0; /* not a stub */
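/* Examples of the name matching above (function names invented): a PC
   inside "_call_via_r3" resolves to the value of r3, while a PC inside
   "__foo_from_thumb" strips the leading "__" and trailing "_from_thumb",
   looks up the minimal symbol "foo" and returns its address.  */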
9418 static void
9419 set_arm_command (char *args, int from_tty)
9421 printf_unfiltered (_("\
9422 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9423 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9426 static void
9427 show_arm_command (char *args, int from_tty)
9429 cmd_show_list (showarmcmdlist, from_tty, "");
9432 static void
9433 arm_update_current_architecture (void)
9435 struct gdbarch_info info;
9437 /* If the current architecture is not ARM, we have nothing to do. */
9438 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9439 return;
9441 /* Update the architecture. */
9442 gdbarch_info_init (&info);
9444 if (!gdbarch_update_p (info))
9445 internal_error (__FILE__, __LINE__, _("could not update architecture"));
9448 static void
9449 set_fp_model_sfunc (char *args, int from_tty,
9450 struct cmd_list_element *c)
9452 int fp_model;
9454 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9455 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9457 arm_fp_model = (enum arm_float_model) fp_model;
9458 break;
9461 if (fp_model == ARM_FLOAT_LAST)
9462 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9463 current_fp_model);
9465 arm_update_current_architecture ();
9468 static void
9469 show_fp_model (struct ui_file *file, int from_tty,
9470 struct cmd_list_element *c, const char *value)
9472 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9474 if (arm_fp_model == ARM_FLOAT_AUTO
9475 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9476 fprintf_filtered (file, _("\
9477 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9478 fp_model_strings[tdep->fp_model]);
9479 else
9480 fprintf_filtered (file, _("\
9481 The current ARM floating point model is \"%s\".\n"),
9482 fp_model_strings[arm_fp_model]);
9485 static void
9486 arm_set_abi (char *args, int from_tty,
9487 struct cmd_list_element *c)
9489 int arm_abi;
9491 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9492 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9494 arm_abi_global = (enum arm_abi_kind) arm_abi;
9495 break;
9498 if (arm_abi == ARM_ABI_LAST)
9499 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9500 arm_abi_string);
9502 arm_update_current_architecture ();
9505 static void
9506 arm_show_abi (struct ui_file *file, int from_tty,
9507 struct cmd_list_element *c, const char *value)
9509 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9511 if (arm_abi_global == ARM_ABI_AUTO
9512 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9513 fprintf_filtered (file, _("\
9514 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9515 arm_abi_strings[tdep->arm_abi]);
9516 else
9517 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9518 arm_abi_string);
9521 static void
9522 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9523 struct cmd_list_element *c, const char *value)
9525 fprintf_filtered (file,
9526 _("The current execution mode assumed "
9527 "(when symbols are unavailable) is \"%s\".\n"),
9528 arm_fallback_mode_string);
9531 static void
9532 arm_show_force_mode (struct ui_file *file, int from_tty,
9533 struct cmd_list_element *c, const char *value)
9535 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9537 fprintf_filtered (file,
9538 _("The current execution mode assumed "
9539 "(even when symbols are available) is \"%s\".\n"),
9540 arm_force_mode_string);
9543 /* If the user changes the register disassembly style used for info
9544 register and other commands, we have to also switch the style used
9545 in opcodes for disassembly output. This function is run in the "set
9546 arm disassembly" command, and does that. */
9548 static void
9549 set_disassembly_style_sfunc (char *args, int from_tty,
9550 struct cmd_list_element *c)
9552 set_disassembly_style ();
9555 /* Return the ARM register name corresponding to register I. */
9556 static const char *
9557 arm_register_name (struct gdbarch *gdbarch, int i)
9559 const int num_regs = gdbarch_num_regs (gdbarch);
9561 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9562 && i >= num_regs && i < num_regs + 32)
9564 static const char *const vfp_pseudo_names[] = {
9565 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9566 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9567 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9568 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9571 return vfp_pseudo_names[i - num_regs];
9574 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9575 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9577 static const char *const neon_pseudo_names[] = {
9578 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9579 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9582 return neon_pseudo_names[i - num_regs - 32];
9585 if (i >= ARRAY_SIZE (arm_register_names))
9586 /* These registers are only supported on targets which supply
9587 an XML description. */
9588 return "";
9590 return arm_register_names[i];
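/* The numbering scheme above is worth spelling out: the VFP and NEON
   pseudo registers are appended directly after the raw registers, s0-s31
   first and q0-q15 after them.  The sketch below is illustrative only
   (it is not used by GDB); the helper name is hypothetical.  */
#if 0
static void
example_pseudo_name (struct gdbarch *gdbarch, int regnum,
		     char *buf, size_t len)
{
  int pseudo = regnum - gdbarch_num_regs (gdbarch);

  if (pseudo < 32)
    xsnprintf (buf, len, "s%d", pseudo);	/* Single-precision pseudo.  */
  else
    xsnprintf (buf, len, "q%d", pseudo - 32);	/* NEON quad pseudo.  */
}
#endif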
9593 static void
9594 set_disassembly_style (void)
9596 int current;
9598 /* Find the style that the user wants. */
9599 for (current = 0; current < num_disassembly_options; current++)
9600 if (disassembly_style == valid_disassembly_styles[current])
9601 break;
9602 gdb_assert (current < num_disassembly_options);
9604 /* Synchronize the disassembler. */
9605 set_arm_regname_option (current);
9608 /* Test whether the coff symbol specific value corresponds to a Thumb
9609 function. */
9611 static int
9612 coff_sym_is_thumb (int val)
9614 return (val == C_THUMBEXT
9615 || val == C_THUMBSTAT
9616 || val == C_THUMBEXTFUNC
9617 || val == C_THUMBSTATFUNC
9618 || val == C_THUMBLABEL);
9621 /* arm_coff_make_msymbol_special()
9622 arm_elf_make_msymbol_special()
9624 These functions test whether the COFF or ELF symbol corresponds to
9625 an address in thumb code, and set a "special" bit in a minimal
9626 symbol to indicate that it does. */
9628 static void
9629 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9631 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9632 == ST_BRANCH_TO_THUMB)
9633 MSYMBOL_SET_SPECIAL (msym);
9636 static void
9637 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9639 if (coff_sym_is_thumb (val))
9640 MSYMBOL_SET_SPECIAL (msym);
9643 static void
9644 arm_objfile_data_free (struct objfile *objfile, void *arg)
9646 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
9647 unsigned int i;
9649 for (i = 0; i < objfile->obfd->section_count; i++)
9650 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
9653 static void
9654 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9655 asymbol *sym)
9657 const char *name = bfd_asymbol_name (sym);
9658 struct arm_per_objfile *data;
9659 VEC(arm_mapping_symbol_s) **map_p;
9660 struct arm_mapping_symbol new_map_sym;
9662 gdb_assert (name[0] == '$');
9663 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9664 return;
9666 data = (struct arm_per_objfile *) objfile_data (objfile,
9667 arm_objfile_data_key);
9668 if (data == NULL)
9670 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9671 struct arm_per_objfile);
9672 set_objfile_data (objfile, arm_objfile_data_key, data);
9673 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9674 objfile->obfd->section_count,
9675 VEC(arm_mapping_symbol_s) *);
9677 map_p = &data->section_maps[bfd_get_section (sym)->index];
9679 new_map_sym.value = sym->value;
9680 new_map_sym.type = name[1];
9682 /* Assume that most mapping symbols appear in order of increasing
9683 value. If they were randomly distributed, it would be faster to
9684 always push here and then sort at first use. */
9685 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9687 struct arm_mapping_symbol *prev_map_sym;
9689 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9690 if (prev_map_sym->value >= sym->value)
9692 unsigned int idx;
9693 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9694 arm_compare_mapping_symbols);
9695 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9696 return;
9700 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
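/* A minimal sketch of the same mostly-sorted insertion idea used above,
   written against a plain array rather than a VEC.  Illustrative only and
   not used by GDB; it assumes the caller has reserved enough capacity.  */
#if 0
static void
example_ordered_insert (bfd_vma *values, unsigned int *count, bfd_vma value)
{
  unsigned int lo = 0, hi = *count;

  /* Common case: the new value is already in order, so just append.  */
  if (*count == 0 || values[*count - 1] < value)
    {
      values[(*count)++] = value;
      return;
    }

  /* Otherwise binary-search for the insertion point and shift.  */
  while (lo < hi)
    {
      unsigned int mid = (lo + hi) / 2;

      if (values[mid] < value)
	lo = mid + 1;
      else
	hi = mid;
    }
  memmove (&values[lo + 1], &values[lo], (*count - lo) * sizeof (values[0]));
  values[lo] = value;
  (*count)++;
}
#endif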
9703 static void
9704 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9706 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9707 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9709 /* If necessary, set the T bit. */
9710 if (arm_apcs_32)
9712 ULONGEST val, t_bit;
9713 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9714 t_bit = arm_psr_thumb_bit (gdbarch);
9715 if (arm_pc_is_thumb (gdbarch, pc))
9716 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9717 val | t_bit);
9718 else
9719 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9720 val & ~t_bit);
9724 /* Read the contents of a NEON quad register, by reading from two
9725 double registers. This is used to implement the quad pseudo
9726 registers, and for argument passing in case the quad registers are
9727 missing; vectors are passed in quad registers when using the VFP
9728 ABI, even if a NEON unit is not present. REGNUM is the index of
9729 the quad register, in [0, 15]. */
9731 static enum register_status
9732 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9733 int regnum, gdb_byte *buf)
9735 char name_buf[4];
9736 gdb_byte reg_buf[8];
9737 int offset, double_regnum;
9738 enum register_status status;
9740 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9741 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9742 strlen (name_buf));
9744 /* d0 is always the least significant half of q0. */
9745 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9746 offset = 8;
9747 else
9748 offset = 0;
9750 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9751 if (status != REG_VALID)
9752 return status;
9753 memcpy (buf + offset, reg_buf, 8);
9755 offset = 8 - offset;
9756 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9757 if (status != REG_VALID)
9758 return status;
9759 memcpy (buf + offset, reg_buf, 8);
9761 return REG_VALID;
9764 static enum register_status
9765 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9766 int regnum, gdb_byte *buf)
9768 const int num_regs = gdbarch_num_regs (gdbarch);
9769 char name_buf[4];
9770 gdb_byte reg_buf[8];
9771 int offset, double_regnum;
9773 gdb_assert (regnum >= num_regs);
9774 regnum -= num_regs;
9776 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9777 /* Quad-precision register. */
9778 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9779 else
9781 enum register_status status;
9783 /* Single-precision register. */
9784 gdb_assert (regnum < 32);
9786 /* s0 is always the least significant half of d0. */
9787 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9788 offset = (regnum & 1) ? 0 : 4;
9789 else
9790 offset = (regnum & 1) ? 4 : 0;
9792 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9793 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9794 strlen (name_buf));
9796 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9797 if (status == REG_VALID)
9798 memcpy (buf, reg_buf + offset, 4);
9799 return status;
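/* The overlay arithmetic used above, shown on its own: sN lives in the raw
   double register d(N/2), and which half it occupies depends on the target
   byte order.  Illustrative only; the helper name is hypothetical and the
   function is not used by GDB.  */
#if 0
static int
example_s_reg_location (int s_regnum, int big_endian_p, int *offset)
{
  int double_regnum = s_regnum >> 1;	/* sN overlays d(N/2).  */

  /* Little-endian: the even sN is the low word of the double register;
     big-endian: the two words are swapped.  */
  if (big_endian_p)
    *offset = (s_regnum & 1) ? 0 : 4;
  else
    *offset = (s_regnum & 1) ? 4 : 0;

  return double_regnum;
}
#endif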
9803 /* Store the contents of BUF to a NEON quad register, by writing to
9804 two double registers. This is used to implement the quad pseudo
9805 registers, and for argument passing in case the quad registers are
9806 missing; vectors are passed in quad registers when using the VFP
9807 ABI, even if a NEON unit is not present. REGNUM is the index
9808 of the quad register, in [0, 15]. */
9810 static void
9811 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9812 int regnum, const gdb_byte *buf)
9814 char name_buf[4];
9815 int offset, double_regnum;
9817 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9818 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9819 strlen (name_buf));
9821 /* d0 is always the least significant half of q0. */
9822 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9823 offset = 8;
9824 else
9825 offset = 0;
9827 regcache_raw_write (regcache, double_regnum, buf + offset);
9828 offset = 8 - offset;
9829 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9832 static void
9833 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9834 int regnum, const gdb_byte *buf)
9836 const int num_regs = gdbarch_num_regs (gdbarch);
9837 char name_buf[4];
9838 gdb_byte reg_buf[8];
9839 int offset, double_regnum;
9841 gdb_assert (regnum >= num_regs);
9842 regnum -= num_regs;
9844 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9845 /* Quad-precision register. */
9846 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9847 else
9849 /* Single-precision register. */
9850 gdb_assert (regnum < 32);
9852 /* s0 is always the least significant half of d0. */
9853 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9854 offset = (regnum & 1) ? 0 : 4;
9855 else
9856 offset = (regnum & 1) ? 4 : 0;
9858 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9859 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9860 strlen (name_buf));
9862 regcache_raw_read (regcache, double_regnum, reg_buf);
9863 memcpy (reg_buf + offset, buf, 4);
9864 regcache_raw_write (regcache, double_regnum, reg_buf);
9868 static struct value *
9869 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9871 const int *reg_p = (const int *) baton;
9872 return value_of_register (*reg_p, frame);
9875 static enum gdb_osabi
9876 arm_elf_osabi_sniffer (bfd *abfd)
9878 unsigned int elfosabi;
9879 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9881 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9883 if (elfosabi == ELFOSABI_ARM)
9884 /* GNU tools use this value. Check note sections in this case,
9885 as well. */
9886 bfd_map_over_sections (abfd,
9887 generic_elf_osabi_sniff_abi_tag_sections,
9888 &osabi);
9890 /* Anything else will be handled by the generic ELF sniffer. */
9891 return osabi;
9894 static int
9895 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9896 struct reggroup *group)
9898 /* The FPS register's type is INT, but it belongs to float_reggroup.
9899 Besides this, the FPS register also belongs to save_reggroup,
9900 restore_reggroup, and all_reggroup, of course. */
9901 if (regnum == ARM_FPS_REGNUM)
9902 return (group == float_reggroup
9903 || group == save_reggroup
9904 || group == restore_reggroup
9905 || group == all_reggroup);
9906 else
9907 return default_register_reggroup_p (gdbarch, regnum, group);
9911 /* For backward-compatibility we allow two 'g' packet lengths with
9912 the remote protocol depending on whether FPA registers are
9913 supplied. M-profile targets do not have FPA registers, but some
9914 stubs already exist in the wild which use a 'g' packet which
9915 supplies them albeit with dummy values. The packet format which
9916 includes FPA registers should be considered deprecated for
9917 M-profile targets. */
9919 static void
9920 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9922 if (gdbarch_tdep (gdbarch)->is_m)
9924 /* If we know from the executable this is an M-profile target,
9925 cater for remote targets whose register set layout is the
9926 same as the FPA layout. */
9927 register_remote_g_packet_guess (gdbarch,
9928 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9929 (16 * INT_REGISTER_SIZE)
9930 + (8 * FP_REGISTER_SIZE)
9931 + (2 * INT_REGISTER_SIZE),
9932 tdesc_arm_with_m_fpa_layout);
9934 /* The regular M-profile layout. */
9935 register_remote_g_packet_guess (gdbarch,
9936 /* r0-r12,sp,lr,pc; xpsr */
9937 (16 * INT_REGISTER_SIZE)
9938 + INT_REGISTER_SIZE,
9939 tdesc_arm_with_m);
9941 /* M-profile plus M4F VFP. */
9942 register_remote_g_packet_guess (gdbarch,
9943 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9944 (16 * INT_REGISTER_SIZE)
9945 + (16 * VFP_REGISTER_SIZE)
9946 + (2 * INT_REGISTER_SIZE),
9947 tdesc_arm_with_m_vfp_d16);
9950 /* Otherwise we don't have a useful guess. */
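/* For concreteness, and assuming the usual register sizes used in this
   file (INT_REGISTER_SIZE == 4, FP_REGISTER_SIZE == 12 and
   VFP_REGISTER_SIZE == 8), the three guesses registered above correspond
   to these 'g' packet payload sizes.  Illustrative only; not used by GDB.  */
#if 0
static void
example_g_packet_sizes (void)
{
  int m_with_fpa_layout = 16 * 4 + 8 * 12 + 2 * 4;	/* 168 bytes.  */
  int m_plain           = 16 * 4 + 4;			/* 68 bytes.  */
  int m_with_vfp_d16    = 16 * 4 + 16 * 8 + 2 * 4;	/* 200 bytes.  */
}
#endif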
9954 /* Initialize the current architecture based on INFO. If possible,
9955 re-use an architecture from ARCHES, which is a list of
9956 architectures already created during this debugging session.
9958 Called e.g. at program startup, when reading a core file, and when
9959 reading a binary file. */
9961 static struct gdbarch *
9962 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9964 struct gdbarch_tdep *tdep;
9965 struct gdbarch *gdbarch;
9966 struct gdbarch_list *best_arch;
9967 enum arm_abi_kind arm_abi = arm_abi_global;
9968 enum arm_float_model fp_model = arm_fp_model;
9969 struct tdesc_arch_data *tdesc_data = NULL;
9970 int i, is_m = 0;
9971 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9972 int have_wmmx_registers = 0;
9973 int have_neon = 0;
9974 int have_fpa_registers = 1;
9975 const struct target_desc *tdesc = info.target_desc;
9977 /* If we have an object to base this architecture on, try to determine
9978 its ABI. */
9980 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9982 int ei_osabi, e_flags;
9984 switch (bfd_get_flavour (info.abfd))
9986 case bfd_target_aout_flavour:
9987 /* Assume it's an old APCS-style ABI. */
9988 arm_abi = ARM_ABI_APCS;
9989 break;
9991 case bfd_target_coff_flavour:
9992 /* Assume it's an old APCS-style ABI. */
9993 /* XXX WinCE? */
9994 arm_abi = ARM_ABI_APCS;
9995 break;
9997 case bfd_target_elf_flavour:
9998 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9999 e_flags = elf_elfheader (info.abfd)->e_flags;
10001 if (ei_osabi == ELFOSABI_ARM)
10003 /* GNU tools used to use this value, but do not for EABI
10004 objects. There's nowhere to tag an EABI version
10005 anyway, so assume APCS. */
10006 arm_abi = ARM_ABI_APCS;
10008 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
10010 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
10011 int attr_arch, attr_profile;
10013 switch (eabi_ver)
10015 case EF_ARM_EABI_UNKNOWN:
10016 /* Assume GNU tools. */
10017 arm_abi = ARM_ABI_APCS;
10018 break;
10020 case EF_ARM_EABI_VER4:
10021 case EF_ARM_EABI_VER5:
10022 arm_abi = ARM_ABI_AAPCS;
10023 /* EABI binaries default to VFP float ordering.
10024 They may also contain build attributes that can
10025 be used to identify if the VFP argument-passing
10026 ABI is in use. */
10027 if (fp_model == ARM_FLOAT_AUTO)
10029 #ifdef HAVE_ELF
10030 switch (bfd_elf_get_obj_attr_int (info.abfd,
10031 OBJ_ATTR_PROC,
10032 Tag_ABI_VFP_args))
10034 case AEABI_VFP_args_base:
10035 /* "The user intended FP parameter/result
10036 passing to conform to AAPCS, base
10037 variant". */
10038 fp_model = ARM_FLOAT_SOFT_VFP;
10039 break;
10040 case AEABI_VFP_args_vfp:
10041 /* "The user intended FP parameter/result
10042 passing to conform to AAPCS, VFP
10043 variant". */
10044 fp_model = ARM_FLOAT_VFP;
10045 break;
10046 case AEABI_VFP_args_toolchain:
10047 /* "The user intended FP parameter/result
10048 passing to conform to tool chain-specific
10049 conventions" - we don't know any such
10050 conventions, so leave it as "auto". */
10051 break;
10052 case AEABI_VFP_args_compatible:
10053 /* "Code is compatible with both the base
10054 and VFP variants; the user did not permit
10055 non-variadic functions to pass FP
10056 parameters/results" - leave it as
10057 "auto". */
10058 break;
10059 default:
10060 /* Attribute value not mentioned in the
10061 November 2012 ABI, so leave it as
10062 "auto". */
10063 break;
10065 #else
10066 fp_model = ARM_FLOAT_SOFT_VFP;
10067 #endif
10069 break;
10071 default:
10072 /* Leave it as "auto". */
10073 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10074 break;
10077 #ifdef HAVE_ELF
10078 /* Detect M-profile programs. This only works if the
10079 executable file includes build attributes; GCC does
10080 copy them to the executable, but e.g. RealView does
10081 not. */
10082 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10083 Tag_CPU_arch);
10084 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10085 OBJ_ATTR_PROC,
10086 Tag_CPU_arch_profile);
10087 /* GCC specifies the profile for v6-M; RealView only
10088 specifies the profile for architectures starting with
10089 V7 (as opposed to architectures with a tag
10090 numerically greater than TAG_CPU_ARCH_V7). */
10091 if (!tdesc_has_registers (tdesc)
10092 && (attr_arch == TAG_CPU_ARCH_V6_M
10093 || attr_arch == TAG_CPU_ARCH_V6S_M
10094 || attr_profile == 'M'))
10095 is_m = 1;
10096 #endif
10099 if (fp_model == ARM_FLOAT_AUTO)
10101 int e_flags = elf_elfheader (info.abfd)->e_flags;
10103 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10105 case 0:
10106 /* Leave it as "auto". Strictly speaking this case
10107 means FPA, but almost nobody uses that now, and
10108 many toolchains fail to set the appropriate bits
10109 for the floating-point model they use. */
10110 break;
10111 case EF_ARM_SOFT_FLOAT:
10112 fp_model = ARM_FLOAT_SOFT_FPA;
10113 break;
10114 case EF_ARM_VFP_FLOAT:
10115 fp_model = ARM_FLOAT_VFP;
10116 break;
10117 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10118 fp_model = ARM_FLOAT_SOFT_VFP;
10119 break;
10123 if (e_flags & EF_ARM_BE8)
10124 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10126 break;
10128 default:
10129 /* Leave it as "auto". */
10130 break;
10134 /* Check any target description for validity. */
10135 if (tdesc_has_registers (tdesc))
10137 /* For most registers we require GDB's default names; but also allow
10138 the numeric names for sp / lr / pc, as a convenience. */
10139 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10140 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10141 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10143 const struct tdesc_feature *feature;
10144 int valid_p;
10146 feature = tdesc_find_feature (tdesc,
10147 "org.gnu.gdb.arm.core");
10148 if (feature == NULL)
10150 feature = tdesc_find_feature (tdesc,
10151 "org.gnu.gdb.arm.m-profile");
10152 if (feature == NULL)
10153 return NULL;
10154 else
10155 is_m = 1;
10158 tdesc_data = tdesc_data_alloc ();
10160 valid_p = 1;
10161 for (i = 0; i < ARM_SP_REGNUM; i++)
10162 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10163 arm_register_names[i]);
10164 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10165 ARM_SP_REGNUM,
10166 arm_sp_names);
10167 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10168 ARM_LR_REGNUM,
10169 arm_lr_names);
10170 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10171 ARM_PC_REGNUM,
10172 arm_pc_names);
10173 if (is_m)
10174 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10175 ARM_PS_REGNUM, "xpsr");
10176 else
10177 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10178 ARM_PS_REGNUM, "cpsr");
10180 if (!valid_p)
10182 tdesc_data_cleanup (tdesc_data);
10183 return NULL;
10186 feature = tdesc_find_feature (tdesc,
10187 "org.gnu.gdb.arm.fpa");
10188 if (feature != NULL)
10190 valid_p = 1;
10191 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10192 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10193 arm_register_names[i]);
10194 if (!valid_p)
10196 tdesc_data_cleanup (tdesc_data);
10197 return NULL;
10200 else
10201 have_fpa_registers = 0;
10203 feature = tdesc_find_feature (tdesc,
10204 "org.gnu.gdb.xscale.iwmmxt");
10205 if (feature != NULL)
10207 static const char *const iwmmxt_names[] = {
10208 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10209 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10210 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10211 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10214 valid_p = 1;
10215 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10216 valid_p
10217 &= tdesc_numbered_register (feature, tdesc_data, i,
10218 iwmmxt_names[i - ARM_WR0_REGNUM]);
10220 /* Check for the control registers, but do not fail if they
10221 are missing. */
10222 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10223 tdesc_numbered_register (feature, tdesc_data, i,
10224 iwmmxt_names[i - ARM_WR0_REGNUM]);
10226 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10227 valid_p
10228 &= tdesc_numbered_register (feature, tdesc_data, i,
10229 iwmmxt_names[i - ARM_WR0_REGNUM]);
10231 if (!valid_p)
10233 tdesc_data_cleanup (tdesc_data);
10234 return NULL;
10237 have_wmmx_registers = 1;
10240 /* If we have a VFP unit, check whether the single precision registers
10241 are present. If not, then we will synthesize them as pseudo
10242 registers. */
10243 feature = tdesc_find_feature (tdesc,
10244 "org.gnu.gdb.arm.vfp");
10245 if (feature != NULL)
10247 static const char *const vfp_double_names[] = {
10248 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10249 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10250 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10251 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10254 /* Require the double precision registers. There must be either
10255 16 or 32. */
10256 valid_p = 1;
10257 for (i = 0; i < 32; i++)
10259 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10260 ARM_D0_REGNUM + i,
10261 vfp_double_names[i]);
10262 if (!valid_p)
10263 break;
10265 if (!valid_p && i == 16)
10266 valid_p = 1;
10268 /* Also require FPSCR. */
10269 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10270 ARM_FPSCR_REGNUM, "fpscr");
10271 if (!valid_p)
10273 tdesc_data_cleanup (tdesc_data);
10274 return NULL;
10277 if (tdesc_unnumbered_register (feature, "s0") == 0)
10278 have_vfp_pseudos = 1;
10280 vfp_register_count = i;
10282 /* If we have VFP, also check for NEON. The architecture allows
10283 NEON without VFP (integer vector operations only), but GDB
10284 does not support that. */
10285 feature = tdesc_find_feature (tdesc,
10286 "org.gnu.gdb.arm.neon");
10287 if (feature != NULL)
10289 /* NEON requires 32 double-precision registers. */
10290 if (i != 32)
10292 tdesc_data_cleanup (tdesc_data);
10293 return NULL;
10296 /* If there are quad registers defined by the stub, use
10297 their type; otherwise (normally) provide them with
10298 the default type. */
10299 if (tdesc_unnumbered_register (feature, "q0") == 0)
10300 have_neon_pseudos = 1;
10302 have_neon = 1;
10307 /* If there is already a candidate, use it. */
10308 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10309 best_arch != NULL;
10310 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10312 if (arm_abi != ARM_ABI_AUTO
10313 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10314 continue;
10316 if (fp_model != ARM_FLOAT_AUTO
10317 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10318 continue;
10320 /* There are various other properties in tdep that we do not
10321 need to check here: those derived from a target description,
10322 since gdbarches with a different target description are
10323 automatically disqualified. */
10325 /* Do check is_m, though, since it might come from the binary. */
10326 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10327 continue;
10329 /* Found a match. */
10330 break;
10333 if (best_arch != NULL)
10335 if (tdesc_data != NULL)
10336 tdesc_data_cleanup (tdesc_data);
10337 return best_arch->gdbarch;
10340 tdep = XCNEW (struct gdbarch_tdep);
10341 gdbarch = gdbarch_alloc (&info, tdep);
10343 /* Record additional information about the architecture we are defining.
10344 These are gdbarch discriminators, like the OSABI. */
10345 tdep->arm_abi = arm_abi;
10346 tdep->fp_model = fp_model;
10347 tdep->is_m = is_m;
10348 tdep->have_fpa_registers = have_fpa_registers;
10349 tdep->have_wmmx_registers = have_wmmx_registers;
10350 gdb_assert (vfp_register_count == 0
10351 || vfp_register_count == 16
10352 || vfp_register_count == 32);
10353 tdep->vfp_register_count = vfp_register_count;
10354 tdep->have_vfp_pseudos = have_vfp_pseudos;
10355 tdep->have_neon_pseudos = have_neon_pseudos;
10356 tdep->have_neon = have_neon;
10358 arm_register_g_packet_guesses (gdbarch);
10360 /* Breakpoints. */
10361 switch (info.byte_order_for_code)
10363 case BFD_ENDIAN_BIG:
10364 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10365 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10366 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10367 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10369 break;
10371 case BFD_ENDIAN_LITTLE:
10372 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10373 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10374 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10375 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10377 break;
10379 default:
10380 internal_error (__FILE__, __LINE__,
10381 _("arm_gdbarch_init: bad byte order for float format"));
10384 /* On ARM targets char defaults to unsigned. */
10385 set_gdbarch_char_signed (gdbarch, 0);
10387 /* Note: for displaced stepping, this includes the breakpoint, and one word
10388 of additional scratch space. This setting isn't used for anything besides
10389 displaced stepping at present. */
10390 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10392 /* This should be low enough for everything. */
10393 tdep->lowest_pc = 0x20;
10394 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10396 /* The default, for both APCS and AAPCS, is to return small
10397 structures in registers. */
10398 tdep->struct_return = reg_struct_return;
10400 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10401 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10403 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10405 /* Frame handling. */
10406 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10407 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10408 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10410 frame_base_set_default (gdbarch, &arm_normal_base);
10412 /* Address manipulation. */
10413 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10415 /* Advance PC across function entry code. */
10416 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10418 /* Detect whether PC is at a point where the stack has been destroyed. */
10419 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
10421 /* Skip trampolines. */
10422 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10424 /* The stack grows downward. */
10425 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10427 /* Breakpoint manipulation. */
10428 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10429 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10430 arm_remote_breakpoint_from_pc);
10432 /* Information about registers, etc. */
10433 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10434 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10435 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10436 set_gdbarch_register_type (gdbarch, arm_register_type);
10437 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10439 /* This "info float" is FPA-specific. Use the generic version if we
10440 do not have FPA. */
10441 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10442 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10444 /* Internal <-> external register number maps. */
10445 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10446 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10448 set_gdbarch_register_name (gdbarch, arm_register_name);
10450 /* Returning results. */
10451 set_gdbarch_return_value (gdbarch, arm_return_value);
10453 /* Disassembly. */
10454 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10456 /* Minsymbol frobbing. */
10457 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10458 set_gdbarch_coff_make_msymbol_special (gdbarch,
10459 arm_coff_make_msymbol_special);
10460 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10462 /* Thumb-2 IT block support. */
10463 set_gdbarch_adjust_breakpoint_address (gdbarch,
10464 arm_adjust_breakpoint_address);
10466 /* Virtual tables. */
10467 set_gdbarch_vbit_in_delta (gdbarch, 1);
10469 /* Hook in the ABI-specific overrides, if they have been registered. */
10470 gdbarch_init_osabi (info, gdbarch);
10472 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10474 /* Add some default predicates. */
10475 if (is_m)
10476 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10477 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10478 dwarf2_append_unwinders (gdbarch);
10479 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10480 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10482 /* Now we have tuned the configuration, set a few final things,
10483 based on what the OS ABI has told us. */
10485 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10486 binaries are always marked. */
10487 if (tdep->arm_abi == ARM_ABI_AUTO)
10488 tdep->arm_abi = ARM_ABI_APCS;
10490 /* Watchpoints are not steppable. */
10491 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10493 /* We used to default to FPA for generic ARM, but almost nobody
10494 uses that now, and we now provide a way for the user to force
10495 the model. So default to the most useful variant. */
10496 if (tdep->fp_model == ARM_FLOAT_AUTO)
10497 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10499 if (tdep->jb_pc >= 0)
10500 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10502 /* Floating point sizes and format. */
10503 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10504 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10506 set_gdbarch_double_format
10507 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10508 set_gdbarch_long_double_format
10509 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10511 else
10513 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10514 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10517 if (have_vfp_pseudos)
10519 /* NOTE: These are the only pseudo registers used by
10520 the ARM target at the moment. If more are added, a
10521 little more care in numbering will be needed. */
10523 int num_pseudos = 32;
10524 if (have_neon_pseudos)
10525 num_pseudos += 16;
10526 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10527 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10528 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10531 if (tdesc_data)
10533 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10535 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10537 /* Override tdesc_register_type to adjust the types of VFP
10538 registers for NEON. */
10539 set_gdbarch_register_type (gdbarch, arm_register_type);
10542 /* Add standard register aliases. We add aliases even for those
10543 names which are used by the current architecture - it's simpler,
10544 and does no harm, since nothing ever lists user registers. */
10545 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10546 user_reg_add (gdbarch, arm_register_aliases[i].name,
10547 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10549 return gdbarch;
10552 static void
10553 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10555 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10557 if (tdep == NULL)
10558 return;
10560 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10561 (unsigned long) tdep->lowest_pc);
10564 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10566 void
10567 _initialize_arm_tdep (void)
10569 struct ui_file *stb;
10570 long length;
10571 struct cmd_list_element *new_set, *new_show;
10572 const char *setname;
10573 const char *setdesc;
10574 const char *const *regnames;
10575 int numregs, i, j;
10576 static char *helptext;
10577 char regdesc[1024], *rdptr = regdesc;
10578 size_t rest = sizeof (regdesc);
10580 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10582 arm_objfile_data_key
10583 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10585 /* Add ourselves to objfile event chain. */
10586 observer_attach_new_objfile (arm_exidx_new_objfile);
10587 arm_exidx_data_key
10588 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10590 /* Register an ELF OS ABI sniffer for ARM binaries. */
10591 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10592 bfd_target_elf_flavour,
10593 arm_elf_osabi_sniffer);
10595 /* Initialize the standard target descriptions. */
10596 initialize_tdesc_arm_with_m ();
10597 initialize_tdesc_arm_with_m_fpa_layout ();
10598 initialize_tdesc_arm_with_m_vfp_d16 ();
10599 initialize_tdesc_arm_with_iwmmxt ();
10600 initialize_tdesc_arm_with_vfpv2 ();
10601 initialize_tdesc_arm_with_vfpv3 ();
10602 initialize_tdesc_arm_with_neon ();
10604 /* Get the number of possible sets of register names defined in opcodes. */
10605 num_disassembly_options = get_arm_regname_num_options ();
10607 /* Add root prefix command for all "set arm"/"show arm" commands. */
10608 add_prefix_cmd ("arm", no_class, set_arm_command,
10609 _("Various ARM-specific commands."),
10610 &setarmcmdlist, "set arm ", 0, &setlist);
10612 add_prefix_cmd ("arm", no_class, show_arm_command,
10613 _("Various ARM-specific commands."),
10614 &showarmcmdlist, "show arm ", 0, &showlist);
10616 /* Sync the opcode insn printer with our register viewer. */
10617 parse_arm_disassembler_option ("reg-names-std");
10619 /* Initialize the array that will be passed to
10620 add_setshow_enum_cmd(). */
10621 valid_disassembly_styles = XNEWVEC (const char *,
10622 num_disassembly_options + 1);
10623 for (i = 0; i < num_disassembly_options; i++)
10625 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
10626 valid_disassembly_styles[i] = setname;
10627 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10628 rdptr += length;
10629 rest -= length;
10630 /* When we find the default names, tell the disassembler to use
10631 them. */
10632 if (!strcmp (setname, "std"))
10634 disassembly_style = setname;
10635 set_arm_regname_option (i);
10638 /* Mark the end of valid options. */
10639 valid_disassembly_styles[num_disassembly_options] = NULL;
10641 /* Create the help text. */
10642 stb = mem_fileopen ();
10643 fprintf_unfiltered (stb, "%s%s%s",
10644 _("The valid values are:\n"),
10645 regdesc,
10646 _("The default is \"std\"."));
10647 helptext = ui_file_xstrdup (stb, NULL);
10648 ui_file_delete (stb);
10650 add_setshow_enum_cmd("disassembler", no_class,
10651 valid_disassembly_styles, &disassembly_style,
10652 _("Set the disassembly style."),
10653 _("Show the disassembly style."),
10654 helptext,
10655 set_disassembly_style_sfunc,
10656 NULL, /* FIXME: i18n: The disassembly style is
10657 \"%s\". */
10658 &setarmcmdlist, &showarmcmdlist);
10660 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10661 _("Set usage of ARM 32-bit mode."),
10662 _("Show usage of ARM 32-bit mode."),
10663 _("When off, a 26-bit PC will be used."),
10664 NULL,
10665 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10666 mode is %s. */
10667 &setarmcmdlist, &showarmcmdlist);
10669 /* Add a command to allow the user to force the FPU model. */
10670 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10671 _("Set the floating point type."),
10672 _("Show the floating point type."),
10673 _("auto - Determine the FP typefrom the OS-ABI.\n\
10674 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10675 fpa - FPA co-processor (GCC compiled).\n\
10676 softvfp - Software FP with pure-endian doubles.\n\
10677 vfp - VFP co-processor."),
10678 set_fp_model_sfunc, show_fp_model,
10679 &setarmcmdlist, &showarmcmdlist);
10681 /* Add a command to allow the user to force the ABI. */
10682 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10683 _("Set the ABI."),
10684 _("Show the ABI."),
10685 NULL, arm_set_abi, arm_show_abi,
10686 &setarmcmdlist, &showarmcmdlist);
10688 /* Add two commands to allow the user to force the assumed
10689 execution mode. */
10690 add_setshow_enum_cmd ("fallback-mode", class_support,
10691 arm_mode_strings, &arm_fallback_mode_string,
10692 _("Set the mode assumed when symbols are unavailable."),
10693 _("Show the mode assumed when symbols are unavailable."),
10694 NULL, NULL, arm_show_fallback_mode,
10695 &setarmcmdlist, &showarmcmdlist);
10696 add_setshow_enum_cmd ("force-mode", class_support,
10697 arm_mode_strings, &arm_force_mode_string,
10698 _("Set the mode assumed even when symbols are available."),
10699 _("Show the mode assumed even when symbols are available."),
10700 NULL, NULL, arm_show_force_mode,
10701 &setarmcmdlist, &showarmcmdlist);
10703 /* Debugging flag. */
10704 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10705 _("Set ARM debugging."),
10706 _("Show ARM debugging."),
10707 _("When on, arm-specific debugging is enabled."),
10708 NULL,
10709 NULL, /* FIXME: i18n: "ARM debugging is %s."  */
10710 &setdebuglist, &showdebuglist);
10713 /* ARM-reversible process record data structures. */
10715 #define ARM_INSN_SIZE_BYTES 4
10716 #define THUMB_INSN_SIZE_BYTES 2
10717 #define THUMB2_INSN_SIZE_BYTES 4
10720 /* Position of the bit within a 32-bit ARM instruction
10721 that defines whether the instruction is a load or store. */
10722 #define INSN_S_L_BIT_NUM 20
10724 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10725 do \
10727 unsigned int reg_len = LENGTH; \
10728 if (reg_len) \
10730 REGS = XNEWVEC (uint32_t, reg_len); \
10731 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
10734 while (0)
10736 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10737 do \
10739 unsigned int mem_len = LENGTH; \
10740 if (mem_len) \
10742 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10743 memcpy(&MEMS->len, &RECORD_BUF[0], \
10744 sizeof(struct arm_mem_r) * LENGTH); \
10747 while (0)
10749 /* Checks whether the insn is already recorded or is yet to be decoded (boolean expression). */
10750 #define INSN_RECORDED(ARM_RECORD) \
10751 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10753 /* ARM memory record structure. */
10754 struct arm_mem_r
10756 uint32_t len; /* Record length. */
10757 uint32_t addr; /* Memory address. */
10760 /* An ARM instruction record contains the opcode and execution state
10761 of the current insn (before entry to decode_insn()), and the list
10762 of to-be-modified registers and memory blocks (on return from
10763 decode_insn()). */
10765 typedef struct insn_decode_record_t
10767 struct gdbarch *gdbarch;
10768 struct regcache *regcache;
10769 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10770 uint32_t arm_insn; /* Should accommodate thumb. */
10771 uint32_t cond; /* Condition code. */
10772 uint32_t opcode; /* Insn opcode. */
10773 uint32_t decode; /* Insn decode bits. */
10774 uint32_t mem_rec_count; /* No of mem records. */
10775 uint32_t reg_rec_count; /* No of reg records. */
10776 uint32_t *arm_regs; /* Registers to be saved for this record. */
10777 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10778 } insn_decode_record;
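/* Every decode routine below follows the same pattern: fill a small
   scratch buffer with the registers and memory ranges the insn will
   clobber, set the counts in the record, and let REG_ALLOC/MEM_ALLOC copy
   the scratch data into the record's heap-allocated arrays.  The routine
   below is an illustrative sketch of that pattern only (the values it
   records are hypothetical); it is not used by GDB.  */
#if 0
static int
example_record_pattern (insn_decode_record *arm_insn_r)
{
  uint32_t record_buf[8], record_buf_mem[8];

  /* Pretend the insn clobbers the status register and LR...  */
  record_buf[0] = ARM_PS_REGNUM;
  record_buf[1] = ARM_LR_REGNUM;
  arm_insn_r->reg_rec_count = 2;

  /* ... and stores 4 bytes at some (hypothetical) address.  */
  record_buf_mem[0] = 4;
  record_buf_mem[1] = 0x1000;
  arm_insn_r->mem_rec_count = 1;

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
#endif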
10781 /* Checks ARM SBZ and SBO mandatory fields. */
10783 static int
10784 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10786 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
10788 if (!len)
10789 return 1;
10791 if (!sbo)
10792 ones = ~ones;
10794 while (ones)
10796 if (!(ones & sbo))
10798 return 0;
10800 ones = ones >> 1;
10802 return 1;
10805 enum arm_record_result
10807 ARM_RECORD_SUCCESS = 0,
10808 ARM_RECORD_FAILURE = 1
10811 typedef enum
10813 ARM_RECORD_STRH=1,
10814 ARM_RECORD_STRD
10815 } arm_record_strx_t;
10817 typedef enum
10819 ARM_RECORD=1,
10820 THUMB_RECORD,
10821 THUMB2_RECORD
10822 } record_type_t;
10825 static int
10826 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10827 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10830 struct regcache *reg_cache = arm_insn_r->regcache;
10831 ULONGEST u_regval[2]= {0};
10833 uint32_t reg_src1 = 0, reg_src2 = 0;
10834 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10835 uint32_t opcode1 = 0;
10837 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10838 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10839 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10842 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10844 /* 1) Handle misc store, immediate offset. */
10845 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10846 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10847 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10848 regcache_raw_read_unsigned (reg_cache, reg_src1,
10849 &u_regval[0]);
10850 if (ARM_PC_REGNUM == reg_src1)
10852 /* If R15 was used as Rn, reading it yields the current PC + 8. */
10853 u_regval[0] = u_regval[0] + 8;
10855 offset_8 = (immed_high << 4) | immed_low;
10856 /* Calculate target store address. */
10857 if (14 == arm_insn_r->opcode)
10859 tgt_mem_addr = u_regval[0] + offset_8;
10861 else
10863 tgt_mem_addr = u_regval[0] - offset_8;
10865 if (ARM_RECORD_STRH == str_type)
10867 record_buf_mem[0] = 2;
10868 record_buf_mem[1] = tgt_mem_addr;
10869 arm_insn_r->mem_rec_count = 1;
10871 else if (ARM_RECORD_STRD == str_type)
10873 record_buf_mem[0] = 4;
10874 record_buf_mem[1] = tgt_mem_addr;
10875 record_buf_mem[2] = 4;
10876 record_buf_mem[3] = tgt_mem_addr + 4;
10877 arm_insn_r->mem_rec_count = 2;
10880 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10882 /* 2) Store, register offset. */
10883 /* Get Rm. */
10884 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10885 /* Get Rn. */
10886 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10887 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10888 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10889 if (15 == reg_src2)
10891 /* If R15 was used as Rn, reading it yields the current PC + 8. */
10892 u_regval[1] = u_regval[1] + 8;
10894 /* Calculate target store address, Rn +/- Rm, register offset. */
10895 if (12 == arm_insn_r->opcode)
10897 tgt_mem_addr = u_regval[0] + u_regval[1];
10899 else
10901 tgt_mem_addr = u_regval[1] - u_regval[0];
10903 if (ARM_RECORD_STRH == str_type)
10905 record_buf_mem[0] = 2;
10906 record_buf_mem[1] = tgt_mem_addr;
10907 arm_insn_r->mem_rec_count = 1;
10909 else if (ARM_RECORD_STRD == str_type)
10911 record_buf_mem[0] = 4;
10912 record_buf_mem[1] = tgt_mem_addr;
10913 record_buf_mem[2] = 4;
10914 record_buf_mem[3] = tgt_mem_addr + 4;
10915 arm_insn_r->mem_rec_count = 2;
10918 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10919 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10921 /* 3) Store, immediate pre-indexed. */
10922 /* 5) Store, immediate post-indexed. */
10923 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10924 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10925 offset_8 = (immed_high << 4) | immed_low;
10926 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10927 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10928 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
10929 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10931 tgt_mem_addr = u_regval[0] + offset_8;
10933 else
10935 tgt_mem_addr = u_regval[0] - offset_8;
10937 if (ARM_RECORD_STRH == str_type)
10939 record_buf_mem[0] = 2;
10940 record_buf_mem[1] = tgt_mem_addr;
10941 arm_insn_r->mem_rec_count = 1;
10943 else if (ARM_RECORD_STRD == str_type)
10945 record_buf_mem[0] = 4;
10946 record_buf_mem[1] = tgt_mem_addr;
10947 record_buf_mem[2] = 4;
10948 record_buf_mem[3] = tgt_mem_addr + 4;
10949 arm_insn_r->mem_rec_count = 2;
10951 /* Record Rn also as it changes. */
10952 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10953 arm_insn_r->reg_rec_count = 1;
10955 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10956 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10958 /* 4) Store, register pre-indexed. */
10959 /* 6) Store, register post-indexed. */
10960 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10961 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10962 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10963 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10964 /* Calculate target store address, Rn +/- Rm, register offset. */
10965 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10967 tgt_mem_addr = u_regval[0] + u_regval[1];
10969 else
10971 tgt_mem_addr = u_regval[1] - u_regval[0];
10973 if (ARM_RECORD_STRH == str_type)
10975 record_buf_mem[0] = 2;
10976 record_buf_mem[1] = tgt_mem_addr;
10977 arm_insn_r->mem_rec_count = 1;
10979 else if (ARM_RECORD_STRD == str_type)
10981 record_buf_mem[0] = 4;
10982 record_buf_mem[1] = tgt_mem_addr;
10983 record_buf_mem[2] = 4;
10984 record_buf_mem[3] = tgt_mem_addr + 4;
10985 arm_insn_r->mem_rec_count = 2;
10987 /* Record Rn also as it changes. */
10988 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10989 arm_insn_r->reg_rec_count = 1;
10991 return 0;
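/* A worked example of the address arithmetic used above for the
   misc-store immediate forms, with hypothetical field values.
   Illustrative only; not used by GDB.  */
#if 0
static uint32_t
example_strh_imm_address (uint32_t rn_val, int rn_is_pc, int add_offset)
{
  uint32_t immed_high = 0x3, immed_low = 0x2;
  uint32_t offset_8 = (immed_high << 4) | immed_low;	/* == 0x32.  */

  /* Reading R15 as the base yields the insn's address plus 8.  */
  if (rn_is_pc)
    rn_val += 8;

  return add_offset ? rn_val + offset_8 : rn_val - offset_8;
}
#endif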
10994 /* Handling ARM extension space insns. */
10996 static int
10997 arm_record_extension_space (insn_decode_record *arm_insn_r)
10999 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
11000 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
11001 uint32_t record_buf[8], record_buf_mem[8];
11002 uint32_t reg_src1 = 0;
11003 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
11004 struct regcache *reg_cache = arm_insn_r->regcache;
11005 ULONGEST u_regval = 0;
11007 gdb_assert (!INSN_RECORDED(arm_insn_r));
11008 /* Handle unconditional insn extension space. */
11010 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
11011 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11012 if (arm_insn_r->cond)
11014 /* PLD has no effect on architectural state; it just affects
11015 the caches. */
11016 if (5 == ((opcode1 & 0xE0) >> 5))
11018 /* BLX(1) */
11019 record_buf[0] = ARM_PS_REGNUM;
11020 record_buf[1] = ARM_LR_REGNUM;
11021 arm_insn_r->reg_rec_count = 2;
11023 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
11027 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11028 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
11030 ret = -1;
11031 /* Undefined instruction on ARM V5; need to handle if later
11032 versions define it. */
11035 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
11036 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11037 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
11039 /* Handle arithmetic insn extension space. */
11040 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
11041 && !INSN_RECORDED(arm_insn_r))
11043 /* Handle MLA(S) and MUL(S). */
11044 if (0 <= insn_op1 && 3 >= insn_op1)
11046 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11047 record_buf[1] = ARM_PS_REGNUM;
11048 arm_insn_r->reg_rec_count = 2;
11050 else if (4 <= insn_op1 && 15 >= insn_op1)
11052 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11053 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11054 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11055 record_buf[2] = ARM_PS_REGNUM;
11056 arm_insn_r->reg_rec_count = 3;
11060 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11061 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11062 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11064 /* Handle control insn extension space. */
11066 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11067 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11069 if (!bit (arm_insn_r->arm_insn,25))
11071 if (!bits (arm_insn_r->arm_insn, 4, 7))
11073 if ((0 == insn_op1) || (2 == insn_op1))
11075 /* MRS. */
11076 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11077 arm_insn_r->reg_rec_count = 1;
11079 else if (1 == insn_op1)
11081 /* CPSR is going to be changed. */
11082 record_buf[0] = ARM_PS_REGNUM;
11083 arm_insn_r->reg_rec_count = 1;
11085 else if (3 == insn_op1)
11087 /* SPSR is going to be changed. */
11088 /* We need to get SPSR value, which is yet to be done. */
11089 printf_unfiltered (_("Process record does not support "
11090 "instruction 0x%0x at address %s.\n"),
11091 arm_insn_r->arm_insn,
11092 paddress (arm_insn_r->gdbarch,
11093 arm_insn_r->this_addr));
11094 return -1;
11097 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11099 if (1 == insn_op1)
11101 /* BX. */
11102 record_buf[0] = ARM_PS_REGNUM;
11103 arm_insn_r->reg_rec_count = 1;
11105 else if (3 == insn_op1)
11107 /* CLZ. */
11108 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11109 arm_insn_r->reg_rec_count = 1;
11112 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11114 /* BLX. */
11115 record_buf[0] = ARM_PS_REGNUM;
11116 record_buf[1] = ARM_LR_REGNUM;
11117 arm_insn_r->reg_rec_count = 2;
11119 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11121 /* QADD, QSUB, QDADD, QDSUB */
11122 record_buf[0] = ARM_PS_REGNUM;
11123 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11124 arm_insn_r->reg_rec_count = 2;
11126 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11128 /* BKPT. */
11129 record_buf[0] = ARM_PS_REGNUM;
11130 record_buf[1] = ARM_LR_REGNUM;
11131 arm_insn_r->reg_rec_count = 2;
11133 /* Save SPSR also; how? */
11134 printf_unfiltered (_("Process record does not support "
11135 "instruction 0x%0x at address %s.\n"),
11136 arm_insn_r->arm_insn,
11137 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11138 return -1;
11140 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11141 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11142 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11143 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11146 if (0 == insn_op1 || 1 == insn_op1)
11148 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11149 /* We don't do the optimization for SMULW<y>, where we
11150 need only Rd. */
11151 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11152 record_buf[1] = ARM_PS_REGNUM;
11153 arm_insn_r->reg_rec_count = 2;
11155 else if (2 == insn_op1)
11157 /* SMLAL<x><y>. */
11158 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11159 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11160 arm_insn_r->reg_rec_count = 2;
11162 else if (3 == insn_op1)
11164 /* SMUL<x><y>. */
11165 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11166 arm_insn_r->reg_rec_count = 1;
11170 else
11172 /* MSR : immediate form. */
11173 if (1 == insn_op1)
11175 /* CPSR is going to be changed. */
11176 record_buf[0] = ARM_PS_REGNUM;
11177 arm_insn_r->reg_rec_count = 1;
11179 else if (3 == insn_op1)
11181 /* SPSR is going to be changed. */
11182 /* We need to get the SPSR value, which is yet to be done. */
11183 printf_unfiltered (_("Process record does not support "
11184 "instruction 0x%0x at address %s.\n"),
11185 arm_insn_r->arm_insn,
11186 paddress (arm_insn_r->gdbarch,
11187 arm_insn_r->this_addr));
11188 return -1;
11193 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11194 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11195 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11197 /* Handle load/store insn extension space. */
11199 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11200 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11201 && !INSN_RECORDED(arm_insn_r))
11203 /* SWP/SWPB. */
11204 if (0 == insn_op1)
11206 /* This insn changes both a register and memory. */
11207 /* SWP or SWPB insn. */
11208 /* Get memory address given by Rn. */
11209 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11210 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11211 /* SWP insn swaps a word. */
11212 if (8 == arm_insn_r->opcode)
11214 record_buf_mem[0] = 4;
11216 else
11218 /* SWPB insn, swaps only byte. */
11219 record_buf_mem[0] = 1;
11221 record_buf_mem[1] = u_regval;
11222 arm_insn_r->mem_rec_count = 1;
11223 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11224 arm_insn_r->reg_rec_count = 1;
11226 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11228 /* STRH. */
11229 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11230 ARM_RECORD_STRH);
11232 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11234 /* LDRD. */
11235 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11236 record_buf[1] = record_buf[0] + 1;
11237 arm_insn_r->reg_rec_count = 2;
11239 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11241 /* STRD. */
11242 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11243 ARM_RECORD_STRD);
11245 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11247 /* LDRH, LDRSB, LDRSH. */
11248 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11249 arm_insn_r->reg_rec_count = 1;
11254 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11255 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11256 && !INSN_RECORDED(arm_insn_r))
11258 ret = -1;
11259 /* Handle coprocessor insn extension space. */
11262 /* To be done for ARMv5 and later; as of now we return -1. */
11263 if (-1 == ret)
11264 printf_unfiltered (_("Process record does not support instruction 0x%0x "
11265 "at address %s.\n"), arm_insn_r->arm_insn,
11266 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11269 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11270 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11272 return ret;
11275 /* Handling opcode 000 insns. */
11277 static int
11278 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11280 struct regcache *reg_cache = arm_insn_r->regcache;
11281 uint32_t record_buf[8], record_buf_mem[8];
11282 ULONGEST u_regval[2] = {0};
11284 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11285 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11286 uint32_t opcode1 = 0;
11288 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11289 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11290 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11292 /* Data-processing insn / multiply insn. */
11293 if (9 == arm_insn_r->decode
11294 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11295 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11297 /* Handle multiply instructions. */
11298 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11299 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11301 /* Handle MLA and MUL. */
11302 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11303 record_buf[1] = ARM_PS_REGNUM;
11304 arm_insn_r->reg_rec_count = 2;
11306 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11308 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11309 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11310 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11311 record_buf[2] = ARM_PS_REGNUM;
11312 arm_insn_r->reg_rec_count = 3;
11315 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11316 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11318 /* Handle misc load insns, as the L bit (bit 20) is set. */
11319 /* An LDR insn is able to branch: if an LDR whose destination
11320 register is R15 is preceded by MOV LR, PC, it emulates a
11321 branch-and-link insn, and hence we need to save CPSR and PC
11322 as well. I am not sure this is the right place; the opcode
11323 = 010 LDR insn makes this happen if R15 is used. */
11325 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11326 if (15 != reg_dest)
11328 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11329 arm_insn_r->reg_rec_count = 1;
11331 else
11333 record_buf[0] = reg_dest;
11334 record_buf[1] = ARM_PS_REGNUM;
11335 arm_insn_r->reg_rec_count = 2;
11338 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11339 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11340 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11341 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11343 /* Handle MSR insn. */
11344 if (9 == arm_insn_r->opcode)
11346 /* CPSR is going to be changed. */
11347 record_buf[0] = ARM_PS_REGNUM;
11348 arm_insn_r->reg_rec_count = 1;
11350 else
11352 /* SPSR is going to be changed. */
11353 /* How to read SPSR value? */
11354 printf_unfiltered (_("Process record does not support instruction "
11355 "0x%0x at address %s.\n"),
11356 arm_insn_r->arm_insn,
11357 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11358 return -1;
11361 else if (9 == arm_insn_r->decode
11362 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11363 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11365 /* Handling SWP, SWPB. */
11366 /* This insn changes both a register and memory. */
11367 /* SWP or SWPB insn. */
11369 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11370 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11371 /* SWP insn swaps a word. */
11372 if (8 == arm_insn_r->opcode)
11374 record_buf_mem[0] = 4;
11376 else
11378 /* SWPB insn, swaps only byte. */
11379 record_buf_mem[0] = 1;
11381 record_buf_mem[1] = u_regval[0];
11382 arm_insn_r->mem_rec_count = 1;
11383 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11384 arm_insn_r->reg_rec_count = 1;
11386 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11387 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11389 /* Handle BLX, branch and link/exchange. */
11390 if (9 == arm_insn_r->opcode)
11392 /* The branch target state is chosen by copying bit[0] of Rm into the
11393 T bit of CPSR, and R14 stores the return address. */
11394 record_buf[0] = ARM_PS_REGNUM;
11395 record_buf[1] = ARM_LR_REGNUM;
11396 arm_insn_r->reg_rec_count = 2;
11399 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11401 /* Handle enhanced software breakpoint insn, BKPT. */
11402 /* CPSR is changed so that execution continues in ARM state,
11403 with normal interrupts disabled, entering abort mode. */
11404 /* PC is set according to the high vector configuration. */
11405 /* If the user hits the breakpoint and then reverse-steps,
11406 we need to go back with the previous CPSR and
11407 Program Counter. */
11408 record_buf[0] = ARM_PS_REGNUM;
11409 record_buf[1] = ARM_LR_REGNUM;
11410 arm_insn_r->reg_rec_count = 2;
11412 /* Save SPSR also; how? */
11413 printf_unfiltered (_("Process record does not support instruction "
11414 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11415 paddress (arm_insn_r->gdbarch,
11416 arm_insn_r->this_addr));
11417 return -1;
11419 else if (11 == arm_insn_r->decode
11420 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11422 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11424 /* Handle str(x) insn */
11425 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11426 ARM_RECORD_STRH);
11428 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11429 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11431 /* Handle BX, branch and link/exchange. */
11432 /* The branch target state is chosen by copying bit[0] of Rm into the T bit of CPSR. */
11433 record_buf[0] = ARM_PS_REGNUM;
11434 arm_insn_r->reg_rec_count = 1;
11436 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11437 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11438 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11440 /* Count leading zeros: CLZ. */
11441 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11442 arm_insn_r->reg_rec_count = 1;
11444 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11445 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11446 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11447 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11450 /* Handle MRS insn. */
11451 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11452 arm_insn_r->reg_rec_count = 1;
11454 else if (arm_insn_r->opcode <= 15)
11456 /* Normal data processing insns. */
11457 /* Whichever of the 11 shifter operand modes is used, the insn modifies
11458 the destination register, which is specified by bits 12-15. */
11459 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11460 record_buf[1] = ARM_PS_REGNUM;
11461 arm_insn_r->reg_rec_count = 2;
11463 else
11465 return -1;
11468 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11469 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11470 return 0;
11473 /* Handling opcode 001 insns. */
11475 static int
11476 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11478 uint32_t record_buf[8], record_buf_mem[8];
11480 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11481 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11483 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11484 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11485 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11488 /* Handle MSR insn. */
11489 if (9 == arm_insn_r->opcode)
11491 /* CPSR is going to be changed. */
11492 record_buf[0] = ARM_PS_REGNUM;
11493 arm_insn_r->reg_rec_count = 1;
11495 else
11497 /* SPSR is going to be changed. */
11500 else if (arm_insn_r->opcode <= 15)
11502 /* Normal data processing insns. */
11503 /* Whichever of the 11 shifter operand modes is used, the insn modifies
11504 the destination register, which is specified by bits 12-15. */
11505 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11506 record_buf[1] = ARM_PS_REGNUM;
11507 arm_insn_r->reg_rec_count = 2;
11509 else
11511 return -1;
11514 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11515 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11516 return 0;
11519 /* Handle ARM mode instructions with opcode 010. */
11521 static int
11522 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11524 struct regcache *reg_cache = arm_insn_r->regcache;
11526 uint32_t reg_base , reg_dest;
11527 uint32_t offset_12, tgt_mem_addr;
11528 uint32_t record_buf[8], record_buf_mem[8];
11529 unsigned char wback;
11530 ULONGEST u_regval;
11532 /* Calculate wback. */
11533 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
11534 || (bit (arm_insn_r->arm_insn, 21) == 1);
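/* In other words (roughly): write-back happens either in post-indexed
   mode (P == 0, e.g. "ldr r0, [r1], #4") or when the W bit is set in
   pre-indexed mode (e.g. "ldr r0, [r1, #4]!"); in both cases the base
   register itself is modified and must be recorded too.  */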
11536 arm_insn_r->reg_rec_count = 0;
11537 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11539 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11541 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11542 and LDRT. */
11544 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11545 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
11547 /* The LDR instruction is capable of doing branching. If MOV LR, PC
11548 precedes an LDR instruction having R15 as reg_dest, it
11549 emulates a branch and link instruction, and hence we need to save
11550 CPSR and PC as well. */
11551 if (ARM_PC_REGNUM == reg_dest)
11552 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11554 /* If wback is true, also save the base register, which is going to be
11555 written to. */
11556 if (wback)
11557 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11559 else
11561 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11563 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11564 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11566 /* Handle bit U. */
11567 if (bit (arm_insn_r->arm_insn, 23))
11569 /* U == 1: Add the offset. */
11570 tgt_mem_addr = (uint32_t) u_regval + offset_12;
11572 else
11574 /* U == 0: subtract the offset. */
11575 tgt_mem_addr = (uint32_t) u_regval - offset_12;
11578 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11579 bytes. */
11580 if (bit (arm_insn_r->arm_insn, 22))
11582 /* STRB and STRBT: 1 byte. */
11583 record_buf_mem[0] = 1;
11585 else
11587 /* STR and STRT: 4 bytes. */
11588 record_buf_mem[0] = 4;
11591 /* Handle bit P. */
11592 if (bit (arm_insn_r->arm_insn, 24))
11593 record_buf_mem[1] = tgt_mem_addr;
11594 else
11595 record_buf_mem[1] = (uint32_t) u_regval;
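/* Sketch of the P-bit effect: with P == 1 the store goes to the offset
   address computed above, while with P == 0 (post-indexed, e.g.
   "str r0, [r1], #8") the data is written at the unmodified base
   address and Rn is updated afterwards.  */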
11597 arm_insn_r->mem_rec_count = 1;
11599 /* If wback is true, also save the base register, which is going to be
11600 written to. */
11601 if (wback)
11602 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11605 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11606 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11607 return 0;
11610 /* Handling opcode 011 insns. */
11612 static int
11613 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11615 struct regcache *reg_cache = arm_insn_r->regcache;
11617 uint32_t shift_imm = 0;
11618 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11619 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11620 uint32_t record_buf[8], record_buf_mem[8];
11622 LONGEST s_word;
11623 ULONGEST u_regval[2];
11625 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11626 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11628 /* Handle enhanced store insns and the LDRD DSP insn; the ordering
11629 below follows the addressing modes of the store insns, starting
11630 with the STRH insn. */
11632 /* LDR or STR? */
11633 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11635 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11636 /* The LDR insn is capable of branching: if it is preceded by
11637 MOV LR, PC and has R15 as its destination, it emulates a
11638 branch and link insn, and hence we need to save CPSR and PC
11639 as well. */
11640 if (15 != reg_dest)
11642 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11643 arm_insn_r->reg_rec_count = 1;
11645 else
11647 record_buf[0] = reg_dest;
11648 record_buf[1] = ARM_PS_REGNUM;
11649 arm_insn_r->reg_rec_count = 2;
11652 else
11654 if (! bits (arm_insn_r->arm_insn, 4, 11))
11656 /* Store insn, register offset and register pre-indexed,
11657 register post-indexed. */
11658 /* Get Rm. */
11659 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11660 /* Get Rn. */
11661 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11662 regcache_raw_read_unsigned (reg_cache, reg_src1
11663 , &u_regval[0]);
11664 regcache_raw_read_unsigned (reg_cache, reg_src2
11665 , &u_regval[1]);
11666 if (15 == reg_src2)
11668 /* If R15 was used as Rn, the value read is the current PC + 8. */
11669 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
11670 u_regval[1] = u_regval[1] + 8;
11672 /* Calculate target store address, Rn +/- Rm, register offset. */
11673 /* U == 1. */
11674 if (bit (arm_insn_r->arm_insn, 23))
11676 tgt_mem_addr = u_regval[0] + u_regval[1];
11678 else
11680 tgt_mem_addr = u_regval[1] - u_regval[0];
11683 switch (arm_insn_r->opcode)
11685 /* STR. */
11686 case 8:
11687 case 12:
11688 /* STR. */
11689 case 9:
11690 case 13:
11691 /* STRT. */
11692 case 1:
11693 case 5:
11694 /* STR. */
11695 case 0:
11696 case 4:
11697 record_buf_mem[0] = 4;
11698 break;
11700 /* STRB. */
11701 case 10:
11702 case 14:
11703 /* STRB. */
11704 case 11:
11705 case 15:
11706 /* STRBT. */
11707 case 3:
11708 case 7:
11709 /* STRB. */
11710 case 2:
11711 case 6:
11712 record_buf_mem[0] = 1;
11713 break;
11715 default:
11716 gdb_assert_not_reached ("no decoding pattern found");
11717 break;
11719 record_buf_mem[1] = tgt_mem_addr;
11720 arm_insn_r->mem_rec_count = 1;
11722 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11723 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11724 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11725 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11726 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11727 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11730 /* Rn is going to be changed in pre-indexed mode and
11731 post-indexed mode as well. */
11732 record_buf[0] = reg_src2;
11733 arm_insn_r->reg_rec_count = 1;
11736 else
11738 /* Store insn, scaled register offset; scaled pre-indexed. */
11739 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11740 /* Get Rm. */
11741 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11742 /* Get Rn. */
11743 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11744 /* Get shift_imm. */
11745 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11746 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11747 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11748 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11749 /* Offset_12 temporarily holds the shift type (bits 5-6). */
11750 switch (offset_12)
11752 case 0:
11753 /* LSL: shift Rm left by shift_imm. */
11754 offset_12 = u_regval[0] << shift_imm;
11755 break;
11757 case 1:
11758 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
11759 break;
11761 case 2:
11762 if (!shift_imm)
11764 if (bit (u_regval[0], 31))
11766 offset_12 = 0xFFFFFFFF;
11768 else
11770 offset_12 = 0;
11773 else
11775 /* This is an arithmetic shift (ASR). */
11776 offset_12 = s_word >> shift_imm;
11778 break;
11780 case 3:
11781 if (!shift_imm)
11783 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11784 &u_regval[1]);
11785 /* Get C flag value and shift it by 31. */
11786 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11787 | (u_regval[0]) >> 1);
11789 else
11791 /* ROR: rotate Rm right by shift_imm bits. */
11792 offset_12 = (u_regval[0] >> shift_imm)
11793 | (u_regval[0] << (32 - shift_imm));
11795 break;
11797 default:
11798 gdb_assert_not_reached ("no decoding pattern found");
11799 break;
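/* To summarise the cases above (roughly): the shift-type field
   (bits 5-6) selects LSL, LSR, ASR or ROR of Rm by shift_imm, with
   shift_imm == 0 in the ROR case meaning RRX; e.g. for
   "str r0, [r1, r2, lsl #2]" the offset is r2 << 2.  */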
11802 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11803 /* bit U set. */
11804 if (bit (arm_insn_r->arm_insn, 23))
11806 tgt_mem_addr = u_regval[1] + offset_12;
11808 else
11810 tgt_mem_addr = u_regval[1] - offset_12;
11813 switch (arm_insn_r->opcode)
11815 /* STR. */
11816 case 8:
11817 case 12:
11818 /* STR. */
11819 case 9:
11820 case 13:
11821 /* STRT. */
11822 case 1:
11823 case 5:
11824 /* STR. */
11825 case 0:
11826 case 4:
11827 record_buf_mem[0] = 4;
11828 break;
11830 /* STRB. */
11831 case 10:
11832 case 14:
11833 /* STRB. */
11834 case 11:
11835 case 15:
11836 /* STRBT. */
11837 case 3:
11838 case 7:
11839 /* STRB. */
11840 case 2:
11841 case 6:
11842 record_buf_mem[0] = 1;
11843 break;
11845 default:
11846 gdb_assert_not_reached ("no decoding pattern found");
11847 break;
11849 record_buf_mem[1] = tgt_mem_addr;
11850 arm_insn_r->mem_rec_count = 1;
11852 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11853 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11854 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11855 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11856 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11857 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11860 /* Rn is going to be changed in register scaled pre-indexed
11861 mode, and scaled post-indexed mode. */
11862 record_buf[0] = reg_src2;
11863 arm_insn_r->reg_rec_count = 1;
11868 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11869 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11870 return 0;
11873 /* Handle ARM mode instructions with opcode 100. */
11875 static int
11876 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11878 struct regcache *reg_cache = arm_insn_r->regcache;
11879 uint32_t register_count = 0, register_bits;
11880 uint32_t reg_base, addr_mode;
11881 uint32_t record_buf[24], record_buf_mem[48];
11882 uint32_t wback;
11883 ULONGEST u_regval;
11885 /* Fetch the list of registers. */
11886 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11887 arm_insn_r->reg_rec_count = 0;
11889 /* Fetch the base register that contains the address we are loading data
11890 to. */
11891 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11893 /* Calculate wback. */
11894 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11896 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11898 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11900 /* Find out which registers are going to be loaded from memory. */
11901 while (register_bits)
11903 if (register_bits & 0x00000001)
11904 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11905 register_bits = register_bits >> 1;
11906 register_count++;
11910 /* If wback is true, also save the base register, which is going to be
11911 written to. */
11912 if (wback)
11913 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11915 /* Save the CPSR register. */
11916 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11918 else
11920 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11922 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11924 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11926 /* Find out how many registers are going to be stored to memory. */
11927 while (register_bits)
11929 if (register_bits & 0x00000001)
11930 register_count++;
11931 register_bits = register_bits >> 1;
11934 switch (addr_mode)
11936 /* STMDA (STMED): Decrement after. */
11937 case 0:
11938 record_buf_mem[1] = (uint32_t) u_regval
11939 - register_count * INT_REGISTER_SIZE + 4;
11940 break;
11941 /* STM (STMIA, STMEA): Increment after. */
11942 case 1:
11943 record_buf_mem[1] = (uint32_t) u_regval;
11944 break;
11945 /* STMDB (STMFD): Decrement before. */
11946 case 2:
11947 record_buf_mem[1] = (uint32_t) u_regval
11948 - register_count * INT_REGISTER_SIZE;
11949 break;
11950 /* STMIB (STMFA): Increment before. */
11951 case 3:
11952 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11953 break;
11954 default:
11955 gdb_assert_not_reached ("no decoding pattern found");
11956 break;
11959 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11960 arm_insn_r->mem_rec_count = 1;
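/* Rough worked example: "stmdb sp!, {r0-r3}" stores 4 registers, so
   record_buf_mem describes 16 bytes starting at SP - 16, and the
   write-back handling below additionally records SP itself.  */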
11962 /* If wback is true, also save the base register, which is going to be
11963 written to. */
11964 if (wback)
11965 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11968 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11969 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11970 return 0;
11973 /* Handling opcode 101 insns. */
11975 static int
11976 arm_record_b_bl (insn_decode_record *arm_insn_r)
11978 uint32_t record_buf[8];
11980 /* Handle B, BL, BLX(1) insns. */
11981 /* B simply branches so we do nothing here. */
11982 /* Note: BLX(1) doesn't fall here but instead falls into
11983 extension space. */
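/* Illustration: "bl target" writes the return address into LR (PC is
   saved by process_record itself), so only LR needs to be recorded
   here; a plain "b target" records nothing.  */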
11984 if (bit (arm_insn_r->arm_insn, 24))
11986 record_buf[0] = ARM_LR_REGNUM;
11987 arm_insn_r->reg_rec_count = 1;
11990 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11992 return 0;
11995 /* Handling opcode 110 insns. */
11997 static int
11998 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
12000 printf_unfiltered (_("Process record does not support instruction "
12001 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
12002 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
12004 return -1;
12007 /* Record handler for vector data transfer instructions. */
12009 static int
12010 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
12012 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
12013 uint32_t record_buf[4];
12015 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
12016 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
12017 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
12018 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
12019 bit_l = bit (arm_insn_r->arm_insn, 20);
12020 bit_c = bit (arm_insn_r->arm_insn, 8);
12022 /* Handle VMOV instruction. */
12023 if (bit_l && bit_c)
12025 record_buf[0] = reg_t;
12026 arm_insn_r->reg_rec_count = 1;
12028 else if (bit_l && !bit_c)
12030 /* Handle VMOV instruction. */
12031 if (bits_a == 0x00)
12033 if (bit (arm_insn_r->arm_insn, 20))
12034 record_buf[0] = reg_t;
12035 else
12036 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
12037 (reg_v << 1));
12039 arm_insn_r->reg_rec_count = 1;
12041 /* Handle VMRS instruction. */
12042 else if (bits_a == 0x07)
12044 if (reg_t == 15)
12045 reg_t = ARM_PS_REGNUM;
12047 record_buf[0] = reg_t;
12048 arm_insn_r->reg_rec_count = 1;
12051 else if (!bit_l && !bit_c)
12053 /* Handle VMOV instruction. */
12054 if (bits_a == 0x00)
12056 if (bit (arm_insn_r->arm_insn, 20))
12057 record_buf[0] = reg_t;
12058 else
12059 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
12060 (reg_v << 1));
12062 arm_insn_r->reg_rec_count = 1;
12064 /* Handle VMSR instruction. */
12065 else if (bits_a == 0x07)
12067 record_buf[0] = ARM_FPSCR_REGNUM;
12068 arm_insn_r->reg_rec_count = 1;
12071 else if (!bit_l && bit_c)
12073 /* Handle VMOV instruction. */
12074 if (!(bits_a & 0x04))
12076 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
12077 + ARM_D0_REGNUM;
12078 arm_insn_r->reg_rec_count = 1;
12080 /* Handle VDUP instruction. */
12081 else
12083 if (bit (arm_insn_r->arm_insn, 21))
12085 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12086 record_buf[0] = reg_v + ARM_D0_REGNUM;
12087 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
12088 arm_insn_r->reg_rec_count = 2;
12090 else
12092 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12093 record_buf[0] = reg_v + ARM_D0_REGNUM;
12094 arm_insn_r->reg_rec_count = 1;
12099 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12100 return 0;
12103 /* Record handler for extension register load/store instructions. */
12105 static int
12106 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
12108 uint32_t opcode, single_reg;
12109 uint8_t op_vldm_vstm;
12110 uint32_t record_buf[8], record_buf_mem[128];
12111 ULONGEST u_regval = 0;
12113 struct regcache *reg_cache = arm_insn_r->regcache;
12114 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
12116 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12117 single_reg = bit (arm_insn_r->arm_insn, 8);
12118 op_vldm_vstm = opcode & 0x1b;
12120 /* Handle VMOV instructions. */
12121 if ((opcode & 0x1e) == 0x04)
12123 if (bit (arm_insn_r->arm_insn, 4))
12125 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12126 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12127 arm_insn_r->reg_rec_count = 2;
12129 else
12131 uint8_t reg_m = (bits (arm_insn_r->arm_insn, 0, 3) << 1)
12132 | bit (arm_insn_r->arm_insn, 5);
12134 if (!single_reg)
12136 record_buf[0] = num_regs + reg_m;
12137 record_buf[1] = num_regs + reg_m + 1;
12138 arm_insn_r->reg_rec_count = 2;
12140 else
12142 record_buf[0] = reg_m + ARM_D0_REGNUM;
12143 arm_insn_r->reg_rec_count = 1;
12147 /* Handle VSTM and VPUSH instructions. */
12148 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12149 || op_vldm_vstm == 0x12)
12151 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12152 uint32_t memory_index = 0;
12154 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12155 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12156 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12157 imm_off32 = imm_off8 << 2; /* imm8 counts words, so scale by 4. */
12158 memory_count = imm_off8;
12160 if (bit (arm_insn_r->arm_insn, 23))
12161 start_address = u_regval;
12162 else
12163 start_address = u_regval - imm_off32;
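/* Rough example: "vpush {d0-d3}" has imm8 = 8 (words), so the block
   written spans 32 bytes ending at the original SP, and SP itself is
   recorded below because of the write-back bit.  */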
12165 if (bit (arm_insn_r->arm_insn, 21))
12167 record_buf[0] = reg_rn;
12168 arm_insn_r->reg_rec_count = 1;
12171 while (memory_count > 0)
12173 if (!single_reg)
12175 record_buf_mem[memory_index] = start_address;
12176 record_buf_mem[memory_index + 1] = 4;
12177 start_address = start_address + 4;
12178 memory_index = memory_index + 2;
12180 else
12182 record_buf_mem[memory_index] = start_address;
12183 record_buf_mem[memory_index + 1] = 4;
12184 record_buf_mem[memory_index + 2] = start_address + 4;
12185 record_buf_mem[memory_index + 3] = 4;
12186 start_address = start_address + 8;
12187 memory_index = memory_index + 4;
12189 memory_count--;
12191 arm_insn_r->mem_rec_count = (memory_index >> 1);
12193 /* Handle VLDM instructions. */
12194 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12195 || op_vldm_vstm == 0x13)
12197 uint32_t reg_count, reg_vd;
12198 uint32_t reg_index = 0;
12200 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12201 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12203 if (single_reg)
12204 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12205 else
12206 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12208 if (bit (arm_insn_r->arm_insn, 21))
12209 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12211 while (reg_count > 0)
12213 if (single_reg)
12214 record_buf[reg_index++] = num_regs + reg_vd + reg_count - 1;
12215 else
12216 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12218 reg_count--;
12220 arm_insn_r->reg_rec_count = reg_index;
12222 /* VSTR Vector store register. */
12223 else if ((opcode & 0x13) == 0x10)
12225 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12226 uint32_t memory_index = 0;
12228 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12229 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12230 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12231 imm_off32 = imm_off8 << 2; /* imm8 counts words, so scale by 4. */
12232 memory_count = imm_off8;
12234 if (bit (arm_insn_r->arm_insn, 23))
12235 start_address = u_regval + imm_off32;
12236 else
12237 start_address = u_regval - imm_off32;
12239 if (single_reg)
12241 record_buf_mem[memory_index] = start_address;
12242 record_buf_mem[memory_index + 1] = 4;
12243 arm_insn_r->mem_rec_count = 1;
12245 else
12247 record_buf_mem[memory_index] = start_address;
12248 record_buf_mem[memory_index + 1] = 4;
12249 record_buf_mem[memory_index + 2] = start_address + 4;
12250 record_buf_mem[memory_index + 3] = 4;
12251 arm_insn_r->mem_rec_count = 2;
12254 /* VLDR Vector load register. */
12255 else if ((opcode & 0x13) == 0x11)
12257 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12259 if (!single_reg)
12261 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12262 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12264 else
12266 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12267 record_buf[0] = num_regs + reg_vd;
12269 arm_insn_r->reg_rec_count = 1;
12272 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12273 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12274 return 0;
12277 /* Record handler for arm/thumb mode VFP data processing instructions. */
12279 static int
12280 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
12282 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12283 uint32_t record_buf[4];
12284 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12285 enum insn_types curr_insn_type = INSN_INV;
12287 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12288 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12289 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12290 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12291 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12292 bit_d = bit (arm_insn_r->arm_insn, 22);
12293 opc1 = opc1 & ~0x04; /* Mask out the D bit (insn bit 22). */
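/* Presumed layout of opc1 after masking out the D bit: 0x0 VMLA/VMLS,
   0x1 VNMLA/VNMLS/VNMUL, 0x2 VMUL, 0x3 VADD/VSUB, 0x8 VDIV and 0xb the
   remaining data-processing group decoded further below.  */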
12295 /* Handle VMLA, VMLS. */
12296 if (opc1 == 0x00)
12298 if (bit (arm_insn_r->arm_insn, 10))
12300 if (bit (arm_insn_r->arm_insn, 6))
12301 curr_insn_type = INSN_T0;
12302 else
12303 curr_insn_type = INSN_T1;
12305 else
12307 if (dp_op_sz)
12308 curr_insn_type = INSN_T1;
12309 else
12310 curr_insn_type = INSN_T2;
12313 /* Handle VNMLA, VNMLS, VNMUL. */
12314 else if (opc1 == 0x01)
12316 if (dp_op_sz)
12317 curr_insn_type = INSN_T1;
12318 else
12319 curr_insn_type = INSN_T2;
12321 /* Handle VMUL. */
12322 else if (opc1 == 0x02 && !(opc3 & 0x01))
12324 if (bit (arm_insn_r->arm_insn, 10))
12326 if (bit (arm_insn_r->arm_insn, 6))
12327 curr_insn_type = INSN_T0;
12328 else
12329 curr_insn_type = INSN_T1;
12331 else
12333 if (dp_op_sz)
12334 curr_insn_type = INSN_T1;
12335 else
12336 curr_insn_type = INSN_T2;
12339 /* Handle VADD, VSUB. */
12340 else if (opc1 == 0x03)
12342 if (!bit (arm_insn_r->arm_insn, 9))
12344 if (bit (arm_insn_r->arm_insn, 6))
12345 curr_insn_type = INSN_T0;
12346 else
12347 curr_insn_type = INSN_T1;
12349 else
12351 if (dp_op_sz)
12352 curr_insn_type = INSN_T1;
12353 else
12354 curr_insn_type = INSN_T2;
12357 /* Handle VDIV. */
12358 else if (opc1 == 0x08)
12360 if (dp_op_sz)
12361 curr_insn_type = INSN_T1;
12362 else
12363 curr_insn_type = INSN_T2;
12365 /* Handle all other vfp data processing instructions. */
12366 else if (opc1 == 0x0b)
12368 /* Handle VMOV. */
12369 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12371 if (bit (arm_insn_r->arm_insn, 4))
12373 if (bit (arm_insn_r->arm_insn, 6))
12374 curr_insn_type = INSN_T0;
12375 else
12376 curr_insn_type = INSN_T1;
12378 else
12380 if (dp_op_sz)
12381 curr_insn_type = INSN_T1;
12382 else
12383 curr_insn_type = INSN_T2;
12386 /* Handle VNEG and VABS. */
12387 else if ((opc2 == 0x01 && opc3 == 0x01)
12388 || (opc2 == 0x00 && opc3 == 0x03))
12390 if (!bit (arm_insn_r->arm_insn, 11))
12392 if (bit (arm_insn_r->arm_insn, 6))
12393 curr_insn_type = INSN_T0;
12394 else
12395 curr_insn_type = INSN_T1;
12397 else
12399 if (dp_op_sz)
12400 curr_insn_type = INSN_T1;
12401 else
12402 curr_insn_type = INSN_T2;
12405 /* Handle VSQRT. */
12406 else if (opc2 == 0x01 && opc3 == 0x03)
12408 if (dp_op_sz)
12409 curr_insn_type = INSN_T1;
12410 else
12411 curr_insn_type = INSN_T2;
12413 /* Handle VCVT. */
12414 else if (opc2 == 0x07 && opc3 == 0x03)
12416 if (!dp_op_sz)
12417 curr_insn_type = INSN_T1;
12418 else
12419 curr_insn_type = INSN_T2;
12421 else if (opc3 & 0x01)
12423 /* Handle VCVT. */
12424 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12426 if (!bit (arm_insn_r->arm_insn, 18))
12427 curr_insn_type = INSN_T2;
12428 else
12430 if (dp_op_sz)
12431 curr_insn_type = INSN_T1;
12432 else
12433 curr_insn_type = INSN_T2;
12436 /* Handle VCVT. */
12437 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12439 if (dp_op_sz)
12440 curr_insn_type = INSN_T1;
12441 else
12442 curr_insn_type = INSN_T2;
12444 /* Handle VCVTB, VCVTT. */
12445 else if ((opc2 & 0x0e) == 0x02)
12446 curr_insn_type = INSN_T2;
12447 /* Handle VCMP, VCMPE. */
12448 else if ((opc2 & 0x0e) == 0x04)
12449 curr_insn_type = INSN_T3;
12453 switch (curr_insn_type)
12455 case INSN_T0:
12456 reg_vd = reg_vd | (bit_d << 4);
12457 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12458 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12459 arm_insn_r->reg_rec_count = 2;
12460 break;
12462 case INSN_T1:
12463 reg_vd = reg_vd | (bit_d << 4);
12464 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12465 arm_insn_r->reg_rec_count = 1;
12466 break;
12468 case INSN_T2:
12469 reg_vd = (reg_vd << 1) | bit_d;
12470 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12471 arm_insn_r->reg_rec_count = 1;
12472 break;
12474 case INSN_T3:
12475 record_buf[0] = ARM_FPSCR_REGNUM;
12476 arm_insn_r->reg_rec_count = 1;
12477 break;
12479 default:
12480 gdb_assert_not_reached ("no decoding pattern found");
12481 break;
12484 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12485 return 0;
12488 /* Handling opcode 110 insns. */
12490 static int
12491 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
12493 uint32_t op, op1, op1_sbit, op1_ebit, coproc;
12495 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12496 op1 = bits (arm_insn_r->arm_insn, 20, 25);
12497 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12499 if ((coproc & 0x0e) == 0x0a)
12501 /* Handle extension register ld/st instructions. */
12502 if (!(op1 & 0x20))
12503 return arm_record_exreg_ld_st_insn (arm_insn_r);
12505 /* 64-bit transfers between arm core and extension registers. */
12506 if ((op1 & 0x3e) == 0x04)
12507 return arm_record_exreg_ld_st_insn (arm_insn_r);
12509 else
12511 /* Handle coprocessor ld/st instructions. */
12512 if (!(op1 & 0x3a))
12514 /* Store. */
12515 if (!op1_ebit)
12516 return arm_record_unsupported_insn (arm_insn_r);
12517 else
12518 /* Load. */
12519 return arm_record_unsupported_insn (arm_insn_r);
12522 /* Move to coprocessor from two arm core registers. */
12523 if (op1 == 0x4)
12524 return arm_record_unsupported_insn (arm_insn_r);
12526 /* Move to two arm core registers from coprocessor. */
12527 if (op1 == 0x5)
12529 uint32_t reg_t[2];
12531 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
12532 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
12533 arm_insn_r->reg_rec_count = 2;
12535 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
12536 return 0;
12539 return arm_record_unsupported_insn (arm_insn_r);
12542 /* Handling opcode 111 insns. */
12544 static int
12545 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
12547 uint32_t op, op1_sbit, op1_ebit, coproc;
12548 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
12549 struct regcache *reg_cache = arm_insn_r->regcache;
12550 ULONGEST u_regval = 0;
12552 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12553 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12554 op1_sbit = bit (arm_insn_r->arm_insn, 24);
12555 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12556 op = bit (arm_insn_r->arm_insn, 4);
12558 /* Handle arm SWI/SVC system call instructions. */
12559 if (op1_sbit)
12561 if (tdep->arm_syscall_record != NULL)
12563 ULONGEST svc_operand, svc_number;
12565 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12567 if (svc_operand) /* OABI. */
12568 svc_number = svc_operand - 0x900000;
12569 else /* EABI. */
12570 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
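/* Rough example of the two ABIs: for OABI, "svc 0x900004" encodes
   syscall 4 directly in the insn (operand - 0x900000), whereas for
   EABI the insn is simply "svc 0" and the syscall number is taken
   from r7.  */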
12572 return tdep->arm_syscall_record (reg_cache, svc_number);
12574 else
12576 printf_unfiltered (_("no syscall record support\n"));
12577 return -1;
12581 if ((coproc & 0x0e) == 0x0a)
12583 /* VFP data-processing instructions. */
12584 if (!op1_sbit && !op)
12585 return arm_record_vfp_data_proc_insn (arm_insn_r);
12587 /* Advanced SIMD, VFP instructions. */
12588 if (!op1_sbit && op)
12589 return arm_record_vdata_transfer_insn (arm_insn_r);
12591 else
12593 /* Coprocessor data operations. */
12594 if (!op1_sbit && !op)
12595 return arm_record_unsupported_insn (arm_insn_r);
12597 /* Move to Coprocessor from ARM core register. */
12598 if (!op1_sbit && !op1_ebit && op)
12599 return arm_record_unsupported_insn (arm_insn_r);
12601 /* Move to arm core register from coprocessor. */
12602 if (!op1_sbit && op1_ebit && op)
12604 uint32_t record_buf[1];
12606 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12607 if (record_buf[0] == 15)
12608 record_buf[0] = ARM_PS_REGNUM;
12610 arm_insn_r->reg_rec_count = 1;
12611 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
12612 record_buf);
12613 return 0;
12617 return arm_record_unsupported_insn (arm_insn_r);
12620 /* Handling opcode 000 insns. */
12622 static int
12623 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12625 uint32_t record_buf[8];
12626 uint32_t reg_src1 = 0;
12628 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12630 record_buf[0] = ARM_PS_REGNUM;
12631 record_buf[1] = reg_src1;
12632 thumb_insn_r->reg_rec_count = 2;
12634 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12636 return 0;
12640 /* Handling opcode 001 insns. */
12642 static int
12643 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12645 uint32_t record_buf[8];
12646 uint32_t reg_src1 = 0;
12648 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12650 record_buf[0] = ARM_PS_REGNUM;
12651 record_buf[1] = reg_src1;
12652 thumb_insn_r->reg_rec_count = 2;
12654 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12656 return 0;
12659 /* Handling opcode 010 insns. */
12661 static int
12662 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12664 struct regcache *reg_cache = thumb_insn_r->regcache;
12665 uint32_t record_buf[8], record_buf_mem[8];
12667 uint32_t reg_src1 = 0, reg_src2 = 0;
12668 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12670 ULONGEST u_regval[2] = {0};
12672 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12674 if (bit (thumb_insn_r->arm_insn, 12))
12676 /* Handle load/store register offset. */
12677 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12678 if (opcode2 >= 12 && opcode2 <= 15)
12680 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
12681 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12682 record_buf[0] = reg_src1;
12683 thumb_insn_r->reg_rec_count = 1;
12685 else if (opcode2 >= 8 && opcode2 <= 10)
12687 /* STR(2), STRB(2), STRH(2) . */
12688 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12689 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12690 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12691 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12692 if (8 == opcode2)
12693 record_buf_mem[0] = 4; /* STR (2). */
12694 else if (10 == opcode2)
12695 record_buf_mem[0] = 1; /* STRB (2). */
12696 else if (9 == opcode2)
12697 record_buf_mem[0] = 2; /* STRH (2). */
12698 record_buf_mem[1] = u_regval[0] + u_regval[1];
12699 thumb_insn_r->mem_rec_count = 1;
12702 else if (bit (thumb_insn_r->arm_insn, 11))
12704 /* Handle load from literal pool. */
12705 /* LDR(3). */
12706 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12707 record_buf[0] = reg_src1;
12708 thumb_insn_r->reg_rec_count = 1;
12710 else if (opcode1)
12712 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12713 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12714 if ((3 == opcode2) && (!opcode3))
12716 /* Branch with exchange. */
12717 record_buf[0] = ARM_PS_REGNUM;
12718 thumb_insn_r->reg_rec_count = 1;
12720 else
12722 /* Format 8; special data processing insns. */
12723 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12724 record_buf[0] = ARM_PS_REGNUM;
12725 record_buf[1] = reg_src1;
12726 thumb_insn_r->reg_rec_count = 2;
12729 else
12731 /* Format 5; data processing insns. */
12732 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12733 if (bit (thumb_insn_r->arm_insn, 7))
12735 reg_src1 = reg_src1 + 8;
12737 record_buf[0] = ARM_PS_REGNUM;
12738 record_buf[1] = reg_src1;
12739 thumb_insn_r->reg_rec_count = 2;
12742 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12743 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12744 record_buf_mem);
12746 return 0;
12749 /* Handling opcode 011 insns. */
12751 static int
12752 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12754 struct regcache *reg_cache = thumb_insn_r->regcache;
12755 uint32_t record_buf[8], record_buf_mem[8];
12757 uint32_t reg_src1 = 0;
12758 uint32_t opcode = 0, immed_5 = 0;
12760 ULONGEST u_regval = 0;
12762 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12764 if (opcode)
12766 /* LDR(1). */
12767 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12768 record_buf[0] = reg_src1;
12769 thumb_insn_r->reg_rec_count = 1;
12771 else
12773 /* STR(1). */
12774 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12775 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12776 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12777 record_buf_mem[0] = 4;
12778 record_buf_mem[1] = u_regval + (immed_5 * 4);
12779 thumb_insn_r->mem_rec_count = 1;
12782 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12783 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12784 record_buf_mem);
12786 return 0;
12789 /* Handling opcode 100 insns. */
12791 static int
12792 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12794 struct regcache *reg_cache = thumb_insn_r->regcache;
12795 uint32_t record_buf[8], record_buf_mem[8];
12797 uint32_t reg_src1 = 0;
12798 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12800 ULONGEST u_regval = 0;
12802 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12804 if (3 == opcode)
12806 /* LDR(4). */
12807 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12808 record_buf[0] = reg_src1;
12809 thumb_insn_r->reg_rec_count = 1;
12811 else if (1 == opcode)
12813 /* LDRH(1). */
12814 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12815 record_buf[0] = reg_src1;
12816 thumb_insn_r->reg_rec_count = 1;
12818 else if (2 == opcode)
12820 /* STR(3). */
12821 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12822 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12823 record_buf_mem[0] = 4;
12824 record_buf_mem[1] = u_regval + (immed_8 * 4);
12825 thumb_insn_r->mem_rec_count = 1;
12827 else if (0 == opcode)
12829 /* STRH(1). */
12830 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12831 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12832 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12833 record_buf_mem[0] = 2;
12834 record_buf_mem[1] = u_regval + (immed_5 * 2);
12835 thumb_insn_r->mem_rec_count = 1;
12838 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12839 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12840 record_buf_mem);
12842 return 0;
12845 /* Handling opcode 101 insns. */
12847 static int
12848 thumb_record_misc (insn_decode_record *thumb_insn_r)
12850 struct regcache *reg_cache = thumb_insn_r->regcache;
12852 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12853 uint32_t register_bits = 0, register_count = 0;
12854 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12855 uint32_t record_buf[24], record_buf_mem[48];
12856 uint32_t reg_src1;
12858 ULONGEST u_regval = 0;
12860 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12861 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12862 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12864 if (14 == opcode2)
12866 /* POP. */
12867 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12868 while (register_bits)
12870 if (register_bits & 0x00000001)
12871 record_buf[index++] = register_count;
12872 register_bits = register_bits >> 1;
12873 register_count++;
12875 record_buf[index++] = ARM_PS_REGNUM;
12876 record_buf[index++] = ARM_SP_REGNUM;
12877 thumb_insn_r->reg_rec_count = index;
12879 else if (10 == opcode2)
12881 /* PUSH. */
12882 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12883 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12884 while (register_bits)
12886 if (register_bits & 0x00000001)
12887 register_count++;
12888 register_bits = register_bits >> 1;
12890 start_address = u_regval - \
12891 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
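/* Rough worked example: "push {r0-r3, lr}" has bit 8 (the LR bit) set
   and four registers in the list, so the block written starts at
   SP - 4 * 5 = SP - 20.  */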
12892 thumb_insn_r->mem_rec_count = register_count;
12893 while (register_count)
12895 record_buf_mem[(register_count * 2) - 1] = start_address;
12896 record_buf_mem[(register_count * 2) - 2] = 4;
12897 start_address = start_address + 4;
12898 register_count--;
12900 record_buf[0] = ARM_SP_REGNUM;
12901 thumb_insn_r->reg_rec_count = 1;
12903 else if (0x1E == opcode1)
12905 /* BKPT insn. */
12906 /* Handle enhanced software breakpoint insn, BKPT. */
12907 /* CPSR is changed to be executed in ARM state, disabling normal
12908 interrupts, entering abort mode. */
12909 /* According to high vector configuration PC is set. */
12910 /* If the user hits the breakpoint and then reverse-steps, we need to go
12911 back with the previous CPSR and Program Counter. */
12912 record_buf[0] = ARM_PS_REGNUM;
12913 record_buf[1] = ARM_LR_REGNUM;
12914 thumb_insn_r->reg_rec_count = 2;
12915 /* We need to save SPSR value, which is not yet done. */
12916 printf_unfiltered (_("Process record does not support instruction "
12917 "0x%0x at address %s.\n"),
12918 thumb_insn_r->arm_insn,
12919 paddress (thumb_insn_r->gdbarch,
12920 thumb_insn_r->this_addr));
12921 return -1;
12923 else if ((0 == opcode) || (1 == opcode))
12925 /* ADD(5), ADD(6). */
12926 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12927 record_buf[0] = reg_src1;
12928 thumb_insn_r->reg_rec_count = 1;
12930 else if (2 == opcode)
12932 /* ADD(7), SUB(4). */
12933 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12934 record_buf[0] = ARM_SP_REGNUM;
12935 thumb_insn_r->reg_rec_count = 1;
12938 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12939 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12940 record_buf_mem);
12942 return 0;
12945 /* Handling opcode 110 insns. */
12947 static int
12948 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12950 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12951 struct regcache *reg_cache = thumb_insn_r->regcache;
12953 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
12954 uint32_t reg_src1 = 0;
12955 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12956 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12957 uint32_t record_buf[24], record_buf_mem[48];
12959 ULONGEST u_regval = 0;
12961 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12962 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12964 if (1 == opcode2)
12967 /* LDMIA. */
12968 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12969 /* Get Rn. */
12970 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12971 while (register_bits)
12973 if (register_bits & 0x00000001)
12974 record_buf[index++] = register_count;
12975 register_bits = register_bits >> 1;
12976 register_count++;
12978 record_buf[index++] = reg_src1;
12979 thumb_insn_r->reg_rec_count = index;
12981 else if (0 == opcode2)
12983 /* Handle STMIA. */
12984 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12985 /* Get Rn. */
12986 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12987 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12988 while (register_bits)
12990 if (register_bits & 0x00000001)
12991 register_count++;
12992 register_bits = register_bits >> 1;
12994 start_address = u_regval;
12995 thumb_insn_r->mem_rec_count = register_count;
12996 while (register_count)
12998 record_buf_mem[(register_count * 2) - 1] = start_address;
12999 record_buf_mem[(register_count * 2) - 2] = 4;
13000 start_address = start_address + 4;
13001 register_count--;
13004 else if (0x1F == opcode1)
13006 /* Handle arm syscall insn. */
13007 if (tdep->arm_syscall_record != NULL)
13009 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
13010 ret = tdep->arm_syscall_record (reg_cache, u_regval);
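/* Illustration: a Thumb "svc #0" under EABI passes the syscall number
   in r7, which is why r7 is read here and handed to the per-OS
   syscall recorder.  */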
13012 else
13014 printf_unfiltered (_("no syscall record support\n"));
13015 return -1;
13019 /* B (1), conditional branch is automatically taken care in process_record,
13020 as PC is saved there. */
13022 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13023 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13024 record_buf_mem);
13026 return ret;
13029 /* Handling opcode 111 insns. */
13031 static int
13032 thumb_record_branch (insn_decode_record *thumb_insn_r)
13034 uint32_t record_buf[8];
13035 uint32_t bits_h = 0;
13037 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
13039 if (2 == bits_h || 3 == bits_h)
13041 /* BL */
13042 record_buf[0] = ARM_LR_REGNUM;
13043 thumb_insn_r->reg_rec_count = 1;
13045 else if (1 == bits_h)
13047 /* BLX(1). */
13048 record_buf[0] = ARM_PS_REGNUM;
13049 record_buf[1] = ARM_LR_REGNUM;
13050 thumb_insn_r->reg_rec_count = 2;
13053 /* B(2) is automatically taken care in process_record, as PC is
13054 saved there. */
13056 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13058 return 0;
13061 /* Handler for thumb2 load/store multiple instructions. */
13063 static int
13064 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
13066 struct regcache *reg_cache = thumb2_insn_r->regcache;
13068 uint32_t reg_rn, op;
13069 uint32_t register_bits = 0, register_count = 0;
13070 uint32_t index = 0, start_address = 0;
13071 uint32_t record_buf[24], record_buf_mem[48];
13073 ULONGEST u_regval = 0;
13075 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13076 op = bits (thumb2_insn_r->arm_insn, 23, 24);
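/* Presumed meaning of the 'op' field (bits 23-24): 0 and 3 select
   SRS/RFE, 1 selects the increment-after forms (LDMIA/STMIA) and 2 the
   decrement-before forms (LDMDB/STMDB), matching the branches below.  */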
13078 if (0 == op || 3 == op)
13080 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13082 /* Handle RFE instruction. */
13083 record_buf[0] = ARM_PS_REGNUM;
13084 thumb2_insn_r->reg_rec_count = 1;
13086 else
13088 /* Handle SRS instruction after reading banked SP. */
13089 return arm_record_unsupported_insn (thumb2_insn_r);
13092 else if (1 == op || 2 == op)
13094 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13096 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13097 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13098 while (register_bits)
13100 if (register_bits & 0x00000001)
13101 record_buf[index++] = register_count;
13103 register_count++;
13104 register_bits = register_bits >> 1;
13106 record_buf[index++] = reg_rn;
13107 record_buf[index++] = ARM_PS_REGNUM;
13108 thumb2_insn_r->reg_rec_count = index;
13110 else
13112 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13113 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13114 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13115 while (register_bits)
13117 if (register_bits & 0x00000001)
13118 register_count++;
13120 register_bits = register_bits >> 1;
13123 if (1 == op)
13125 /* Start address calculation for STMIA (increment after). */
13126 start_address = u_regval;
13128 else if (2 == op)
13130 /* Start address calculation for STMDB (decrement before). */
13131 start_address = u_regval - register_count * 4;
13134 thumb2_insn_r->mem_rec_count = register_count;
13135 while (register_count)
13137 record_buf_mem[register_count * 2 - 1] = start_address;
13138 record_buf_mem[register_count * 2 - 2] = 4;
13139 start_address = start_address + 4;
13140 register_count--;
13142 record_buf[0] = reg_rn;
13143 record_buf[1] = ARM_PS_REGNUM;
13144 thumb2_insn_r->reg_rec_count = 2;
13148 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13149 record_buf_mem);
13150 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13151 record_buf);
13152 return ARM_RECORD_SUCCESS;
13155 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13156 instructions. */
13158 static int
13159 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
13161 struct regcache *reg_cache = thumb2_insn_r->regcache;
13163 uint32_t reg_rd, reg_rn, offset_imm;
13164 uint32_t reg_dest1, reg_dest2;
13165 uint32_t address, offset_addr;
13166 uint32_t record_buf[8], record_buf_mem[8];
13167 uint32_t op1, op2, op3;
13168 LONGEST s_word;
13170 ULONGEST u_regval[2];
13172 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13173 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13174 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
13176 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13178 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13180 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13181 record_buf[0] = reg_dest1;
13182 record_buf[1] = ARM_PS_REGNUM;
13183 thumb2_insn_r->reg_rec_count = 2;
13186 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13188 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13189 record_buf[2] = reg_dest2;
13190 thumb2_insn_r->reg_rec_count = 3;
13193 else
13195 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13196 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13198 if (0 == op1 && 0 == op2)
13200 /* Handle STREX. */
13201 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13202 address = u_regval[0] + (offset_imm * 4);
13203 record_buf_mem[0] = 4;
13204 record_buf_mem[1] = address;
13205 thumb2_insn_r->mem_rec_count = 1;
13206 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13207 record_buf[0] = reg_rd;
13208 thumb2_insn_r->reg_rec_count = 1;
13210 else if (1 == op1 && 0 == op2)
13212 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13213 record_buf[0] = reg_rd;
13214 thumb2_insn_r->reg_rec_count = 1;
13215 address = u_regval[0];
13216 record_buf_mem[1] = address;
13218 if (4 == op3)
13220 /* Handle STREXB. */
13221 record_buf_mem[0] = 1;
13222 thumb2_insn_r->mem_rec_count = 1;
13224 else if (5 == op3)
13226 /* Handle STREXH. */
13227 record_buf_mem[0] = 2;
13228 thumb2_insn_r->mem_rec_count = 1;
13230 else if (7 == op3)
13232 /* Handle STREXD. */
13233 address = u_regval[0];
13234 record_buf_mem[0] = 4;
13235 record_buf_mem[2] = 4;
13236 record_buf_mem[3] = address + 4;
13237 thumb2_insn_r->mem_rec_count = 2;
13240 else
13242 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13244 if (bit (thumb2_insn_r->arm_insn, 24))
13246 if (bit (thumb2_insn_r->arm_insn, 23))
13247 offset_addr = u_regval[0] + (offset_imm * 4);
13248 else
13249 offset_addr = u_regval[0] - (offset_imm * 4);
13251 address = offset_addr;
13253 else
13254 address = u_regval[0];
13256 record_buf_mem[0] = 4;
13257 record_buf_mem[1] = address;
13258 record_buf_mem[2] = 4;
13259 record_buf_mem[3] = address + 4;
13260 thumb2_insn_r->mem_rec_count = 2;
13261 record_buf[0] = reg_rn;
13262 thumb2_insn_r->reg_rec_count = 1;
13266 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13267 record_buf);
13268 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13269 record_buf_mem);
13270 return ARM_RECORD_SUCCESS;
13273 /* Handler for thumb2 data processing (shift register and modified immediate)
13274 instructions. */
13276 static int
13277 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
13279 uint32_t reg_rd, op;
13280 uint32_t record_buf[8];
13282 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13283 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13285 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13287 record_buf[0] = ARM_PS_REGNUM;
13288 thumb2_insn_r->reg_rec_count = 1;
13290 else
13292 record_buf[0] = reg_rd;
13293 record_buf[1] = ARM_PS_REGNUM;
13294 thumb2_insn_r->reg_rec_count = 2;
13297 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13298 record_buf);
13299 return ARM_RECORD_SUCCESS;
13302 /* Generic handler for thumb2 instructions which effect destination and PS
13303 registers. */
13305 static int
13306 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
13308 uint32_t reg_rd;
13309 uint32_t record_buf[8];
13311 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13313 record_buf[0] = reg_rd;
13314 record_buf[1] = ARM_PS_REGNUM;
13315 thumb2_insn_r->reg_rec_count = 2;
13317 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13318 record_buf);
13319 return ARM_RECORD_SUCCESS;
13322 /* Handler for thumb2 branch and miscellaneous control instructions. */
13324 static int
13325 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
13327 uint32_t op, op1, op2;
13328 uint32_t record_buf[8];
13330 op = bits (thumb2_insn_r->arm_insn, 20, 26);
13331 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
13332 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13334 /* Handle MSR insn. */
13335 if (!(op1 & 0x2) && 0x38 == op)
13337 if (!(op2 & 0x3))
13339 /* CPSR is going to be changed. */
13340 record_buf[0] = ARM_PS_REGNUM;
13341 thumb2_insn_r->reg_rec_count = 1;
13343 else
13345 arm_record_unsupported_insn(thumb2_insn_r);
13346 return -1;
13349 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
13351 /* BLX. */
13352 record_buf[0] = ARM_PS_REGNUM;
13353 record_buf[1] = ARM_LR_REGNUM;
13354 thumb2_insn_r->reg_rec_count = 2;
13357 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13358 record_buf);
13359 return ARM_RECORD_SUCCESS;
13362 /* Handler for thumb2 store single data item instructions. */
13364 static int
13365 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
13367 struct regcache *reg_cache = thumb2_insn_r->regcache;
13369 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
13370 uint32_t address, offset_addr;
13371 uint32_t record_buf[8], record_buf_mem[8];
13372 uint32_t op1, op2;
13374 ULONGEST u_regval[2];
13376 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
13377 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
13378 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13379 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13381 if (bit (thumb2_insn_r->arm_insn, 23))
13383 /* T2 encoding. */
13384 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
13385 offset_addr = u_regval[0] + offset_imm;
13386 address = offset_addr;
13388 else
13390 /* T3 encoding. */
13391 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
13393 /* Handle STRB (register). */
13394 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
13395 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
13396 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
13397 offset_addr = u_regval[1] << shift_imm;
13398 address = u_regval[0] + offset_addr;
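/* Rough example: for "strb r0, [r1, r2, lsl #2]" the bytes written go
   to r1 + (r2 << 2), which is the address computed here.  */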
13400 else
13402 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13403 if (bit (thumb2_insn_r->arm_insn, 10))
13405 if (bit (thumb2_insn_r->arm_insn, 9))
13406 offset_addr = u_regval[0] + offset_imm;
13407 else
13408 offset_addr = u_regval[0] - offset_imm;
13410 address = offset_addr;
13412 else
13413 address = u_regval[0];
13417 switch (op1)
13419 /* Store byte instructions. */
13420 case 4:
13421 case 0:
13422 record_buf_mem[0] = 1;
13423 break;
13424 /* Store half word instructions. */
13425 case 1:
13426 case 5:
13427 record_buf_mem[0] = 2;
13428 break;
13429 /* Store word instructions. */
13430 case 2:
13431 case 6:
13432 record_buf_mem[0] = 4;
13433 break;
13435 default:
13436 gdb_assert_not_reached ("no decoding pattern found");
13437 break;
13440 record_buf_mem[1] = address;
13441 thumb2_insn_r->mem_rec_count = 1;
13442 record_buf[0] = reg_rn;
13443 thumb2_insn_r->reg_rec_count = 1;
13445 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13446 record_buf);
13447 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13448 record_buf_mem);
13449 return ARM_RECORD_SUCCESS;
13452 /* Handler for thumb2 load memory hints instructions. */
13454 static int
13455 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
13457 uint32_t record_buf[8];
13458 uint32_t reg_rt, reg_rn;
13460 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
13461 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13463 if (ARM_PC_REGNUM != reg_rt)
13465 record_buf[0] = reg_rt;
13466 record_buf[1] = reg_rn;
13467 record_buf[2] = ARM_PS_REGNUM;
13468 thumb2_insn_r->reg_rec_count = 3;
13470 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13471 record_buf);
13472 return ARM_RECORD_SUCCESS;
13475 return ARM_RECORD_FAILURE;
13478 /* Handler for thumb2 load word instructions. */
13480 static int
13481 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
13483 uint32_t opcode1 = 0, opcode2 = 0;
13484 uint32_t record_buf[8];
13486 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
13487 record_buf[1] = ARM_PS_REGNUM;
13488 thumb2_insn_r->reg_rec_count = 2;
13490 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13491 record_buf);
13492 return ARM_RECORD_SUCCESS;
13495 /* Handler for thumb2 long multiply, long multiply accumulate, and
13496 divide instructions. */
13498 static int
13499 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
13501 uint32_t opcode1 = 0, opcode2 = 0;
13502 uint32_t record_buf[8];
13503 uint32_t reg_src1 = 0;
13505 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
13506 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
13508 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
13510 /* Handle SMULL, UMULL, SMLAL, UMLAL and the related long
13511 multiply / multiply-accumulate variants. */
13512 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13513 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13514 record_buf[2] = ARM_PS_REGNUM;
13515 thumb2_insn_r->reg_rec_count = 3;
13517 else if (1 == opcode1 || 3 == opcode1)
13519 /* Handle SDIV and UDIV. */
13520 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13521 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13522 record_buf[2] = ARM_PS_REGNUM;
13523 thumb2_insn_r->reg_rec_count = 3;
13525 else
13526 return ARM_RECORD_FAILURE;
13528 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13529 record_buf);
13530 return ARM_RECORD_SUCCESS;
13533 /* Record handler for thumb32 coprocessor instructions. */
13535 static int
13536 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
13538 if (bit (thumb2_insn_r->arm_insn, 25))
13539 return arm_record_coproc_data_proc (thumb2_insn_r);
13540 else
13541 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
13544 /* Record handler for advanced SIMD structure load/store instructions. */
13546 static int
13547 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
13549 struct regcache *reg_cache = thumb2_insn_r->regcache;
13550 uint32_t l_bit, a_bit, b_bits;
13551 uint32_t record_buf[128], record_buf_mem[128];
13552 uint32_t reg_rn, reg_vd, address, f_esize, f_elem;
13553 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
13554 uint8_t f_ebytes;
13556 l_bit = bit (thumb2_insn_r->arm_insn, 21);
13557 a_bit = bit (thumb2_insn_r->arm_insn, 23);
13558 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
13559 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13560 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
13561 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
13562 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
13563 f_esize = 8 * f_ebytes;
13564 f_elem = 8 / f_ebytes;
13566 if (!l_bit)
13568 ULONGEST u_regval = 0;
13569 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13570 address = u_regval;
13572 if (!a_bit)
13574 /* Handle VST1. */
13575 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13577 if (b_bits == 0x07)
13578 bf_regs = 1;
13579 else if (b_bits == 0x0a)
13580 bf_regs = 2;
13581 else if (b_bits == 0x06)
13582 bf_regs = 3;
13583 else if (b_bits == 0x02)
13584 bf_regs = 4;
13585 else
13586 bf_regs = 0;
13588 for (index_r = 0; index_r < bf_regs; index_r++)
13590 for (index_e = 0; index_e < f_elem; index_e++)
13592 record_buf_mem[index_m++] = f_ebytes;
13593 record_buf_mem[index_m++] = address;
13594 address = address + f_ebytes;
13595 thumb2_insn_r->mem_rec_count += 1;
13599 /* Handle VST2. */
13600 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13602 if (b_bits == 0x09 || b_bits == 0x08)
13603 bf_regs = 1;
13604 else if (b_bits == 0x03)
13605 bf_regs = 2;
13606 else
13607 bf_regs = 0;
13609 for (index_r = 0; index_r < bf_regs; index_r++)
13610 for (index_e = 0; index_e < f_elem; index_e++)
13612 for (loop_t = 0; loop_t < 2; loop_t++)
13614 record_buf_mem[index_m++] = f_ebytes;
13615 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13616 thumb2_insn_r->mem_rec_count += 1;
13618 address = address + (2 * f_ebytes);
13621 /* Handle VST3. */
13622 else if ((b_bits & 0x0e) == 0x04)
13624 for (index_e = 0; index_e < f_elem; index_e++)
13626 for (loop_t = 0; loop_t < 3; loop_t++)
13628 record_buf_mem[index_m++] = f_ebytes;
13629 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13630 thumb2_insn_r->mem_rec_count += 1;
13632 address = address + (3 * f_ebytes);
13635 /* Handle VST4. */
13636 else if (!(b_bits & 0x0e))
13638 for (index_e = 0; index_e < f_elem; index_e++)
13640 for (loop_t = 0; loop_t < 4; loop_t++)
13642 record_buf_mem[index_m++] = f_ebytes;
13643 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13644 thumb2_insn_r->mem_rec_count += 1;
13646 address = address + (4 * f_ebytes);
13650 else
13652 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
13654 if (bft_size == 0x00)
13655 f_ebytes = 1;
13656 else if (bft_size == 0x01)
13657 f_ebytes = 2;
13658 else if (bft_size == 0x02)
13659 f_ebytes = 4;
13660 else
13661 f_ebytes = 0;
13663 /* Handle VST1. */
13664 if (!(b_bits & 0x0b) || b_bits == 0x08)
13665 thumb2_insn_r->mem_rec_count = 1;
13666 /* Handle VST2. */
13667 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
13668 thumb2_insn_r->mem_rec_count = 2;
13669 /* Handle VST3. */
13670 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
13671 thumb2_insn_r->mem_rec_count = 3;
13672 /* Handle VST4. */
13673 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
13674 thumb2_insn_r->mem_rec_count = 4;
13676 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
13678 record_buf_mem[2 * index_m] = f_ebytes;
13679 record_buf_mem[2 * index_m + 1] = address + (index_m * f_ebytes);
13683 else
13685 if (!a_bit)
13687 /* Handle VLD1. */
13688 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13689 thumb2_insn_r->reg_rec_count = 1;
13690 /* Handle VLD2. */
13691 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13692 thumb2_insn_r->reg_rec_count = 2;
13693 /* Handle VLD3. */
13694 else if ((b_bits & 0x0e) == 0x04)
13695 thumb2_insn_r->reg_rec_count = 3;
13696 /* Handle VLD4. */
13697 else if (!(b_bits & 0x0e))
13698 thumb2_insn_r->reg_rec_count = 4;
13700 else
13702 /* Handle VLD1. */
13703 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
13704 thumb2_insn_r->reg_rec_count = 1;
13705 /* Handle VLD2. */
13706 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
13707 thumb2_insn_r->reg_rec_count = 2;
13708 /* Handle VLD3. */
13709 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
13710 thumb2_insn_r->reg_rec_count = 3;
13711 /* Handle VLD4. */
13712 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
13713 thumb2_insn_r->reg_rec_count = 4;
13715 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
13716 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
13720 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
13722 record_buf[thumb2_insn_r->reg_rec_count] = reg_rn;
13723 thumb2_insn_r->reg_rec_count += 1;
13726 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13727 record_buf);
13728 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13729 record_buf_mem);
13730 return 0;
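/* Worked example (illustrative only; instruction chosen for the sake of the
   arithmetic): for a VST1.32 {d0, d1}, [r0], the size field (bits 6-7) is 2
   and the type field (bits 8-11) is 0x0a, so f_ebytes = 1 << 2 = 4,
   f_elem = 8 / 4 = 2 elements per D register and bf_regs = 2.  The store
   loop above therefore records 2 * 2 = 4 memory writes of 4 bytes each, at
   r0, r0+4, r0+8 and r0+12.  With no writeback (the Rm field is 15) no
   register is recorded for this form.  */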
13733 /* Decodes thumb2 instruction type and invokes its record handler. */
13735 static unsigned int
13736 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
13738 uint32_t op, op1, op2;
13740 op = bit (thumb2_insn_r->arm_insn, 15);
13741 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
13742 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
13744 if (op1 == 0x01)
13746 if (!(op2 & 0x64))
13748 /* Load/store multiple instruction. */
13749 return thumb2_record_ld_st_multiple (thumb2_insn_r);
13751 else if (!((op2 & 0x64) ^ 0x04))
13753 /* Load/store (dual/exclusive) and table branch instruction. */
13754 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
13756 else if (!((op2 & 0x20) ^ 0x20))
13758 /* Data-processing (shifted register). */
13759 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13761 else if (op2 & 0x40)
13763 /* Co-processor instructions. */
13764 return thumb2_record_coproc_insn (thumb2_insn_r);
13767 else if (op1 == 0x02)
13769 if (op)
13771 /* Branches and miscellaneous control instructions. */
13772 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
13774 else if (op2 & 0x20)
13776 /* Data-processing (plain binary immediate) instruction. */
13777 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13779 else
13781 /* Data-processing (modified immediate). */
13782 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13785 else if (op1 == 0x03)
13787 if (!(op2 & 0x71))
13789 /* Store single data item. */
13790 return thumb2_record_str_single_data (thumb2_insn_r);
13792 else if (!((op2 & 0x71) ^ 0x10))
13794 /* Advanced SIMD or structure load/store instructions. */
13795 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13797 else if (!((op2 & 0x67) ^ 0x01))
13799 /* Load byte, memory hints instruction. */
13800 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13802 else if (!((op2 & 0x67) ^ 0x03))
13804 /* Load halfword, memory hints instruction. */
13805 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13807 else if (!((op2 & 0x67) ^ 0x05))
13809 /* Load word instruction. */
13810 return thumb2_record_ld_word (thumb2_insn_r);
13812 else if (!((op2 & 0x70) ^ 0x20))
13814 /* Data-processing (register) instruction. */
13815 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13817 else if (!((op2 & 0x78) ^ 0x30))
13819 /* Multiply, multiply accumulate, abs diff instruction. */
13820 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13822 else if (!((op2 & 0x78) ^ 0x38))
13824 /* Long multiply, long multiply accumulate, and divide. */
13825 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13827 else if (op2 & 0x40)
13829 /* Co-processor instructions. */
13830 return thumb2_record_coproc_insn (thumb2_insn_r);
13834 return -1;
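/* Worked example (illustrative only; the encoding is an assumed sample):
   for LDR.W r2, [r1, #8], whose value after the halfword swap is
   0xf8d12008, op1 = bits (insn, 27, 28) = 0x3 and
   op2 = bits (insn, 20, 26) = 0x0d.  Under op1 == 0x03 the test
   !((op2 & 0x67) ^ 0x05) succeeds (0x0d & 0x67 == 0x05), so the insn is
   dispatched to thumb2_record_ld_word above.  */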
13837 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13838 and a positive value on failure. */
13840 static int
13841 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
13843 gdb_byte buf[insn_size];
13845 memset (&buf[0], 0, insn_size);
13847 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
13848 return 1;
13849 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13850 insn_size,
13851 gdbarch_byte_order_for_code (insn_record->gdbarch));
13852 return 0;
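/* A minimal sketch of the byte-order handling above (not built; the helper
   name is hypothetical): on a little-endian target the two bytes of the
   Thumb insn "bx lr" appear in memory as 0x70, 0x47, and
   extract_unsigned_integer folds them back into the architectural value
   0x4770.  */
#if 0
static void
extract_arm_insn_example (void)
{
  gdb_byte buf[2] = { 0x70, 0x47 };	/* "bx lr" as stored in memory.  */
  uint32_t insn = (uint32_t) extract_unsigned_integer (buf, 2,
						       BFD_ENDIAN_LITTLE);
  gdb_assert (insn == 0x4770);
}
#endif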
13855 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13857 /* Decode arm/thumb insn depending on condition codes and opcodes, and
13858 dispatch it. */
13860 static int
13861 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
13862 uint32_t insn_size)
13865 /* Counting bits from 0, bits 25, 26, 27 decode the type of arm instruction. */
13866 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13868 arm_record_data_proc_misc_ld_str, /* 000. */
13869 arm_record_data_proc_imm, /* 001. */
13870 arm_record_ld_st_imm_offset, /* 010. */
13871 arm_record_ld_st_reg_offset, /* 011. */
13872 arm_record_ld_st_multiple, /* 100. */
13873 arm_record_b_bl, /* 101. */
13874 arm_record_asimd_vfp_coproc, /* 110. */
13875 arm_record_coproc_data_proc /* 111. */
13878 /* Counting bits from 0, bits 13, 14, 15 decode the type of thumb instruction. */
13879 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13881 thumb_record_shift_add_sub, /* 000. */
13882 thumb_record_add_sub_cmp_mov, /* 001. */
13883 thumb_record_ld_st_reg_offset, /* 010. */
13884 thumb_record_ld_st_imm_offset, /* 011. */
13885 thumb_record_ld_st_stack, /* 100. */
13886 thumb_record_misc, /* 101. */
13887 thumb_record_ldm_stm_swi, /* 110. */
13888 thumb_record_branch /* 111. */
13891 int ret = 0; /* Return value: negative: failure, 0: success. */
13892 uint32_t insn_id = 0;
13894 if (extract_arm_insn (arm_record, insn_size))
13896 if (record_debug)
13898 printf_unfiltered (_("Process record: error reading memory at "
13899 "addr %s len = %d.\n"),
13900 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
13902 return -1;
13904 else if (ARM_RECORD == record_type)
13906 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13907 insn_id = bits (arm_record->arm_insn, 25, 27);
13908 ret = arm_record_extension_space (arm_record);
13909 /* If this insn has fallen into extension space
13910 then we need not decode it anymore. */
13911 if (ret != -1 && !INSN_RECORDED (arm_record))
13913 ret = arm_handle_insn[insn_id] (arm_record);
13916 else if (THUMB_RECORD == record_type)
13918 /* Thumb does not have condition codes, so set the condition to -1. */
13919 arm_record->cond = -1;
13920 insn_id = bits (arm_record->arm_insn, 13, 15);
13921 ret = thumb_handle_insn[insn_id] (arm_record);
13923 else if (THUMB2_RECORD == record_type)
13926 /* Thumb does not have condition codes, so set the condition to -1. */
13926 arm_record->cond = -1;
13928 /* Swap the first halfword of the 32-bit thumb instruction with the second. */
13929 arm_record->arm_insn
13930 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13932 insn_id = thumb2_record_decode_insn_handler (arm_record);
13934 if (insn_id != ARM_RECORD_SUCCESS)
13936 arm_record_unsupported_insn (arm_record);
13937 ret = -1;
13940 else
13942 /* Throw assertion. */
13943 gdb_assert_not_reached ("not a valid instruction, could not decode");
13946 return ret;
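/* A minimal sketch of the Thumb-2 halfword swap above (not built; the
   helper name is hypothetical): the insn is stored first halfword first,
   so a raw 4-byte little-endian read of STR.W r5, [r2, #4]
   (hw1 = 0xf8c2, hw2 = 0x5004) yields 0x5004f8c2; swapping the halfwords
   restores the architectural value 0xf8c25004 that the bit-field
   accessors expect.  */
#if 0
static void
thumb2_halfword_swap_example (void)
{
  uint32_t raw = 0x5004f8c2;	/* As read from memory, little-endian.  */
  uint32_t insn = (raw >> 16) | (raw << 16);
  gdb_assert (insn == 0xf8c25004);
}
#endif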
13950 /* Cleans up local record registers and memory allocations. */
13952 static void
13953 deallocate_reg_mem (insn_decode_record *record)
13955 xfree (record->arm_regs);
13956 xfree (record->arm_mems);
13960 /* Parse the current instruction and record the values of the registers and
13961 memory that will be changed by the current instruction to "record_arch_list".
13962 Return -1 if something is wrong. */
13965 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13966 CORE_ADDR insn_addr)
13969 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13970 uint32_t no_of_rec = 0;
13971 int ret = 0; /* Return value: -1: record failure; 0: success. */
13972 ULONGEST t_bit = 0, insn_id = 0;
13974 ULONGEST u_regval = 0;
13976 insn_decode_record arm_record;
13978 memset (&arm_record, 0, sizeof (insn_decode_record));
13979 arm_record.regcache = regcache;
13980 arm_record.this_addr = insn_addr;
13981 arm_record.gdbarch = gdbarch;
13984 if (record_debug > 1)
13986 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13987 "addr = %s\n",
13988 paddress (gdbarch, arm_record.this_addr));
13991 if (extract_arm_insn (&arm_record, 2))
13993 if (record_debug)
13995 printf_unfiltered (_("Process record: error reading memory at "
13996 "addr %s len = %d.\n"),
13997 paddress (arm_record.gdbarch,
13998 arm_record.this_addr), 2);
14000 return -1;
14003 /* Check whether the insn is a thumb or an arm one. */
14005 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
14006 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
14009 if (!(u_regval & t_bit))
14011 /* We are decoding arm insn. */
14012 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
14014 else
14016 insn_id = bits (arm_record.arm_insn, 11, 15);
14017 /* Is it a thumb2 insn? */
14018 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
14020 ret = decode_insn (&arm_record, THUMB2_RECORD,
14021 THUMB2_INSN_SIZE_BYTES);
14023 else
14025 /* We are decoding thumb insn. */
14026 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
14030 if (0 == ret)
14032 /* Record registers. */
14033 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
14034 if (arm_record.arm_regs)
14036 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
14038 if (record_full_arch_list_add_reg
14039 (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
14040 ret = -1;
14043 /* Record memories. */
14044 if (arm_record.arm_mems)
14046 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
14048 if (record_full_arch_list_add_mem
14049 ((CORE_ADDR) arm_record.arm_mems[no_of_rec].addr,
14050 arm_record.arm_mems[no_of_rec].len))
14051 ret = -1;
14055 if (record_full_arch_list_add_end ())
14056 ret = -1;
14060 deallocate_reg_mem (&arm_record);
14062 return ret;
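/* arm_process_record is the entry point used by "record full".  It is
   typically wired into the architecture vector from arm_gdbarch_init,
   along these lines (sketch only; the actual call lives elsewhere in this
   file):

     set_gdbarch_process_record (gdbarch, arm_process_record);

   Once it returns 0, the record framework has a snapshot of every register
   and memory range the instruction will modify, which is what makes
   reverse execution of that instruction possible.  */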