[binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2019 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
20 #include "defs.h"
22 #include <ctype.h> /* XXX for isupper (). */
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2-frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
49 #include "arch/arm.h"
50 #include "arch/arm-get-next-pcs.h"
51 #include "arm-tdep.h"
52 #include "gdb/sim-arm.h"
54 #include "elf-bfd.h"
55 #include "coff/internal.h"
56 #include "elf/arm.h"
58 #include "common/vec.h"
60 #include "record.h"
61 #include "record-full.h"
62 #include <algorithm>
64 #include "features/arm/arm-with-m.c"
65 #include "features/arm/arm-with-m-fpa-layout.c"
66 #include "features/arm/arm-with-m-vfp-d16.c"
67 #include "features/arm/arm-with-iwmmxt.c"
68 #include "features/arm/arm-with-vfpv2.c"
69 #include "features/arm/arm-with-vfpv3.c"
70 #include "features/arm/arm-with-neon.c"
72 #if GDB_SELF_TEST
73 #include "common/selftest.h"
74 #endif
76 static int arm_debug;
78 /* Macros for setting and testing a bit in a minimal symbol that marks
79 it as a Thumb function. The MSB of the minimal symbol's "info" field
80 is used for this purpose.
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
85 #define MSYMBOL_SET_SPECIAL(msym) \
86 MSYMBOL_TARGET_FLAG_1 (msym) = 1
88 #define MSYMBOL_IS_SPECIAL(msym) \
89 MSYMBOL_TARGET_FLAG_1 (msym)
91 /* Per-objfile data used for mapping symbols. */
92 static const struct objfile_data *arm_objfile_data_key;
94 struct arm_mapping_symbol
96 bfd_vma value;
97 char type;
99 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
100 DEF_VEC_O(arm_mapping_symbol_s);
102 struct arm_per_objfile
104 VEC(arm_mapping_symbol_s) **section_maps;
107 /* The list of available "set arm ..." and "show arm ..." commands. */
108 static struct cmd_list_element *setarmcmdlist = NULL;
109 static struct cmd_list_element *showarmcmdlist = NULL;
111 /* The type of floating-point to use. Keep this in sync with enum
112 arm_float_model, and the help string in _initialize_arm_tdep. */
113 static const char *const fp_model_strings[] =
115 "auto",
116 "softfpa",
117 "fpa",
118 "softvfp",
119 "vfp",
120 NULL
123 /* A variable that can be configured by the user. */
124 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
125 static const char *current_fp_model = "auto";
127 /* The ABI to use. Keep this in sync with arm_abi_kind. */
128 static const char *const arm_abi_strings[] =
130 "auto",
131 "APCS",
132 "AAPCS",
133 NULL
136 /* A variable that can be configured by the user. */
137 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
138 static const char *arm_abi_string = "auto";
140 /* The execution mode to assume. */
141 static const char *const arm_mode_strings[] =
143 "auto",
144 "arm",
145 "thumb",
146 NULL
149 static const char *arm_fallback_mode_string = "auto";
150 static const char *arm_force_mode_string = "auto";
152 /* The standard register names, and all the valid aliases for them. Note
153 that `fp', `sp' and `pc' are not added in this alias list, because they
154 have been added as builtin user registers in
155 std-regs.c:_initialize_frame_reg. */
156 static const struct
158 const char *name;
159 int regnum;
160 } arm_register_aliases[] = {
161 /* Basic register numbers. */
162 { "r0", 0 },
163 { "r1", 1 },
164 { "r2", 2 },
165 { "r3", 3 },
166 { "r4", 4 },
167 { "r5", 5 },
168 { "r6", 6 },
169 { "r7", 7 },
170 { "r8", 8 },
171 { "r9", 9 },
172 { "r10", 10 },
173 { "r11", 11 },
174 { "r12", 12 },
175 { "r13", 13 },
176 { "r14", 14 },
177 { "r15", 15 },
178 /* Synonyms (argument and variable registers). */
179 { "a1", 0 },
180 { "a2", 1 },
181 { "a3", 2 },
182 { "a4", 3 },
183 { "v1", 4 },
184 { "v2", 5 },
185 { "v3", 6 },
186 { "v4", 7 },
187 { "v5", 8 },
188 { "v6", 9 },
189 { "v7", 10 },
190 { "v8", 11 },
191 /* Other platform-specific names for r9. */
192 { "sb", 9 },
193 { "tr", 9 },
194 /* Special names. */
195 { "ip", 12 },
196 { "lr", 14 },
197 /* Names used by GCC (not listed in the ARM EABI). */
198 { "sl", 10 },
199 /* A special name from the older ATPCS. */
200 { "wr", 7 },
203 static const char *const arm_register_names[] =
204 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
205 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
206 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
207 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
208 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
209 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
210 "fps", "cpsr" }; /* 24 25 */
212 /* Holds the current set of options to be passed to the disassembler. */
213 static char *arm_disassembler_options;
215 /* Valid register name styles. */
216 static const char **valid_disassembly_styles;
218 /* Disassembly style to use. Default to "std" register names. */
219 static const char *disassembly_style;
221 /* This is used to keep the bfd arch_info in sync with the disassembly
222 style. */
223 static void set_disassembly_style_sfunc (const char *, int,
224 struct cmd_list_element *);
225 static void show_disassembly_style_sfunc (struct ui_file *, int,
226 struct cmd_list_element *,
227 const char *);
229 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
230 readable_regcache *regcache,
231 int regnum, gdb_byte *buf);
232 static void arm_neon_quad_write (struct gdbarch *gdbarch,
233 struct regcache *regcache,
234 int regnum, const gdb_byte *buf);
236 static CORE_ADDR
237 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
240 /* get_next_pcs operations. */
241 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
242 arm_get_next_pcs_read_memory_unsigned_integer,
243 arm_get_next_pcs_syscall_next_pc,
244 arm_get_next_pcs_addr_bits_remove,
245 arm_get_next_pcs_is_thumb,
246 NULL,
249 struct arm_prologue_cache
251 /* The stack pointer at the time this frame was created; i.e. the
252 caller's stack pointer when this function was called. It is used
253 to identify this frame. */
254 CORE_ADDR prev_sp;
256 /* The frame base for this frame is just prev_sp - frame size.
257 FRAMESIZE is the distance from the frame pointer to the
258 initial stack pointer. */
260 int framesize;
262 /* The register used to hold the frame pointer for this frame. */
263 int framereg;
265 /* Saved register offsets. */
266 struct trad_frame_saved_reg *saved_regs;
269 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
270 CORE_ADDR prologue_start,
271 CORE_ADDR prologue_end,
272 struct arm_prologue_cache *cache);
274 /* Architecture version for displaced stepping. This affects the behaviour of
275 certain instructions, and really should not be hard-wired. */
277 #define DISPLACED_STEPPING_ARCH_VERSION 5
279 /* Set to true if the 32-bit mode is in use. */
281 int arm_apcs_32 = 1;
283 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
286 arm_psr_thumb_bit (struct gdbarch *gdbarch)
288 if (gdbarch_tdep (gdbarch)->is_m)
289 return XPSR_T;
290 else
291 return CPSR_T;
294 /* Determine if the processor is currently executing in Thumb mode. */
297 arm_is_thumb (struct regcache *regcache)
299 ULONGEST cpsr;
300 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
302 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
304 return (cpsr & t_bit) != 0;
307 /* Determine if FRAME is executing in Thumb mode. */
310 arm_frame_is_thumb (struct frame_info *frame)
312 CORE_ADDR cpsr;
313 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
315 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
316 directly (from a signal frame or dummy frame) or by interpreting
317 the saved LR (from a prologue or DWARF frame). So consult it and
318 trust the unwinders. */
319 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
321 return (cpsr & t_bit) != 0;
324 /* Callback for VEC_lower_bound. */
326 static inline int
327 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
328 const struct arm_mapping_symbol *rhs)
330 return lhs->value < rhs->value;
333 /* Search for the mapping symbol covering MEMADDR. If one is found,
334 return its type. Otherwise, return 0. If START is non-NULL,
335 set *START to the location of the mapping symbol. */
337 static char
338 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
340 struct obj_section *sec;
342 /* If there are mapping symbols, consult them. */
343 sec = find_pc_section (memaddr);
344 if (sec != NULL)
346 struct arm_per_objfile *data;
347 VEC(arm_mapping_symbol_s) *map;
348 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
349 0 };
350 unsigned int idx;
352 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
353 arm_objfile_data_key);
354 if (data != NULL)
356 map = data->section_maps[sec->the_bfd_section->index];
357 if (!VEC_empty (arm_mapping_symbol_s, map))
359 struct arm_mapping_symbol *map_sym;
361 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
362 arm_compare_mapping_symbols);
364 /* VEC_lower_bound finds the earliest ordered insertion
365 point. If the following symbol starts at this exact
366 address, we use that; otherwise, the preceding
367 mapping symbol covers this address. */
368 if (idx < VEC_length (arm_mapping_symbol_s, map))
370 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
371 if (map_sym->value == map_key.value)
373 if (start)
374 *start = map_sym->value + obj_section_addr (sec);
375 return map_sym->type;
379 if (idx > 0)
381 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
382 if (start)
383 *start = map_sym->value + obj_section_addr (sec);
384 return map_sym->type;
390 return 0;
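/* Editor's sketch (not part of the original file): ARM ELF mapping symbols
   are named "$a" (ARM code), "$t" (Thumb code) and "$d" (data), and the TYPE
   returned above is assumed to be the second character of that name, so a
   caller only needs to compare it against 'a', 't' or 'd'.  The helper below
   is hypothetical and only illustrates the intended use.  */
#if 0
static int
example_addr_is_thumb_code (CORE_ADDR memaddr)
{
  CORE_ADDR start;
  char type = arm_find_mapping_symbol (memaddr, &start);

  /* A return of 0 means no mapping symbol covers MEMADDR.  */
  return type == 't';
}
#endif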
393 /* Determine if the program counter specified in MEMADDR is in a Thumb
394 function. This function should be called for addresses unrelated to
395 any executing frame; otherwise, prefer arm_frame_is_thumb. */
398 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
400 struct bound_minimal_symbol sym;
401 char type;
402 arm_displaced_step_closure *dsc
403 = ((arm_displaced_step_closure * )
404 get_displaced_step_closure_by_addr (memaddr));
406 /* If checking the mode of a displaced instruction in the copy area, the mode
407 should be determined by the instruction at the original address. */
408 if (dsc)
410 if (debug_displaced)
411 fprintf_unfiltered (gdb_stdlog,
412 "displaced: check mode of %.8lx instead of %.8lx\n",
413 (unsigned long) dsc->insn_addr,
414 (unsigned long) memaddr);
415 memaddr = dsc->insn_addr;
418 /* If bit 0 of the address is set, assume this is a Thumb address. */
419 if (IS_THUMB_ADDR (memaddr))
420 return 1;
422 /* If the user wants to override the symbol table, let them. */
423 if (strcmp (arm_force_mode_string, "arm") == 0)
424 return 0;
425 if (strcmp (arm_force_mode_string, "thumb") == 0)
426 return 1;
428 /* ARM v6-M and v7-M are always in Thumb mode. */
429 if (gdbarch_tdep (gdbarch)->is_m)
430 return 1;
432 /* If there are mapping symbols, consult them. */
433 type = arm_find_mapping_symbol (memaddr, NULL);
434 if (type)
435 return type == 't';
437 /* Thumb functions have a "special" bit set in minimal symbols. */
438 sym = lookup_minimal_symbol_by_pc (memaddr);
439 if (sym.minsym)
440 return (MSYMBOL_IS_SPECIAL (sym.minsym));
442 /* If the user wants to override the fallback mode, let them. */
443 if (strcmp (arm_fallback_mode_string, "arm") == 0)
444 return 0;
445 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
446 return 1;
448 /* If we couldn't find any symbol, but we're talking to a running
449 target, then trust the current value of $cpsr. This lets
450 "display/i $pc" always show the correct mode (though if there is
451 a symbol table we will not reach here, so it still may not be
452 displayed in the mode in which it will be executed). */
453 if (target_has_registers)
454 return arm_frame_is_thumb (get_current_frame ());
456 /* Otherwise we're out of luck; we assume ARM. */
457 return 0;
460 /* Determine if the address specified equals any of these magic return
461 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
462 architectures.
464 From ARMv6-M Reference Manual B1.5.8
465 Table B1-5 Exception return behavior
467 EXC_RETURN Return To Return Stack
468 0xFFFFFFF1 Handler mode Main
469 0xFFFFFFF9 Thread mode Main
470 0xFFFFFFFD Thread mode Process
472 From ARMv7-M Reference Manual B1.5.8
473 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
475 EXC_RETURN Return To Return Stack
476 0xFFFFFFF1 Handler mode Main
477 0xFFFFFFF9 Thread mode Main
478 0xFFFFFFFD Thread mode Process
480 Table B1-9 EXC_RETURN definition of exception return behavior, with
483 EXC_RETURN Return To Return Stack Frame Type
484 0xFFFFFFE1 Handler mode Main Extended
485 0xFFFFFFE9 Thread mode Main Extended
486 0xFFFFFFED Thread mode Process Extended
487 0xFFFFFFF1 Handler mode Main Basic
488 0xFFFFFFF9 Thread mode Main Basic
489 0xFFFFFFFD Thread mode Process Basic
491 For more details see "B1.5.8 Exception return behavior"
492 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
494 static int
495 arm_m_addr_is_magic (CORE_ADDR addr)
497 switch (addr)
499 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
500 the exception return behavior. */
501 case 0xffffffe1:
502 case 0xffffffe9:
503 case 0xffffffed:
504 case 0xfffffff1:
505 case 0xfffffff9:
506 case 0xfffffffd:
507 /* Address is magic. */
508 return 1;
510 default:
511 /* Address is not magic. */
512 return 0;
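/* Editor's sketch (not part of the original file): the magic values accepted
   above come straight from the EXC_RETURN tables quoted in the comment, so a
   quick self-check, assuming gdb_assert is usable here, would be:  */
#if 0
static void
example_check_exc_return_values (void)
{
  gdb_assert (arm_m_addr_is_magic (0xfffffff9));   /* Thread mode, main stack.  */
  gdb_assert (arm_m_addr_is_magic (0xffffffe1));   /* Handler mode, extended frame.  */
  gdb_assert (!arm_m_addr_is_magic (0x00008000));  /* An ordinary code address.  */
}
#endif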
516 /* Remove useless bits from addresses in a running program. */
517 static CORE_ADDR
518 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
520 /* On M-profile devices, do not strip the low bit from EXC_RETURN
521 (the magic exception return address). */
522 if (gdbarch_tdep (gdbarch)->is_m
523 && arm_m_addr_is_magic (val))
524 return val;
526 if (arm_apcs_32)
527 return UNMAKE_THUMB_ADDR (val);
528 else
529 return (val & 0x03fffffc);
532 /* Return 1 if PC is the start of a compiler helper function which
533 can be safely ignored during prologue skipping. IS_THUMB is true
534 if the function is known to be a Thumb function due to the way it
535 is being called. */
536 static int
537 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
539 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
540 struct bound_minimal_symbol msym;
542 msym = lookup_minimal_symbol_by_pc (pc);
543 if (msym.minsym != NULL
544 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
545 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
547 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
549 /* The GNU linker's Thumb call stub to foo is named
550 __foo_from_thumb. */
551 if (strstr (name, "_from_thumb") != NULL)
552 name += 2;
554 /* On soft-float targets, __truncdfsf2 is called to convert promoted
555 arguments to their argument types in non-prototyped
556 functions. */
557 if (startswith (name, "__truncdfsf2"))
558 return 1;
559 if (startswith (name, "__aeabi_d2f"))
560 return 1;
562 /* Internal functions related to thread-local storage. */
563 if (startswith (name, "__tls_get_addr"))
564 return 1;
565 if (startswith (name, "__aeabi_read_tp"))
566 return 1;
568 else
570 /* If we run against a stripped glibc, we may be unable to identify
571 special functions by name. Check for one important case,
572 __aeabi_read_tp, by comparing the *code* against the default
573 implementation (this is hand-written ARM assembler in glibc). */
575 if (!is_thumb
576 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
577 == 0xe3e00a0f /* mov r0, #0xffff0fff */
578 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
579 == 0xe240f01f) /* sub pc, r0, #31 */
580 return 1;
583 return 0;
586 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
587 the first 16 bits of the instruction, and INSN2 is the second 16 bits. */
589 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
590 ((bits ((insn1), 0, 3) << 12) \
591 | (bits ((insn1), 10, 10) << 11) \
592 | (bits ((insn2), 12, 14) << 8) \
593 | bits ((insn2), 0, 7))
595 /* Extract the immediate from a movw/movt instruction of encoding A. INSN is
596 the 32-bit instruction. */
597 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
598 ((bits ((insn), 16, 19) << 12) \
599 | bits ((insn), 0, 11))
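/* Editor's sketch (not part of the original file): worked examples for the
   two macros above.  "movw r0, #0x1234" encodes as the Thumb halfword pair
   0xf241/0x2034 and as the ARM word 0xe3010234; both macros reassemble the
   scattered imm4:i:imm3:imm8 (T) or imm4:imm12 (A) fields back into 0x1234.
   The encodings are given for illustration only.  */
#if 0
static void
example_extract_movw_imm (void)
{
  gdb_assert (EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x2034) == 0x1234);
  gdb_assert (EXTRACT_MOVW_MOVT_IMM_A (0xe3010234) == 0x1234);
}
#endif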
601 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
603 static unsigned int
604 thumb_expand_immediate (unsigned int imm)
606 unsigned int count = imm >> 7;
608 if (count < 8)
609 switch (count / 2)
611 case 0:
612 return imm & 0xff;
613 case 1:
614 return (imm & 0xff) | ((imm & 0xff) << 16);
615 case 2:
616 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
617 case 3:
618 return (imm & 0xff) | ((imm & 0xff) << 8)
619 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
622 return (0x80 | (imm & 0x7f)) << (32 - count);
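/* Editor's sketch (not part of the original file): a few worked values for
   thumb_expand_immediate, matching the ThumbExpandImmediate cases above.
   0x0ab is the plain-byte case, 0x1ab the 0x00XY00XY duplication case, and
   0x47f the rotated case ('1':imm7 rotated right by 8).  */
#if 0
static void
example_thumb_expand_immediate (void)
{
  gdb_assert (thumb_expand_immediate (0x0ab) == 0x000000ab);
  gdb_assert (thumb_expand_immediate (0x1ab) == 0x00ab00ab);
  gdb_assert (thumb_expand_immediate (0x47f) == 0xff000000);
}
#endif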
625 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
626 epilogue, 0 otherwise. */
628 static int
629 thumb_instruction_restores_sp (unsigned short insn)
631 return (insn == 0x46bd /* mov sp, r7 */
632 || (insn & 0xff80) == 0xb000 /* add sp, imm */
633 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
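/* Editor's sketch (not part of the original file): sample 16-bit Thumb
   encodings matched (and not matched) by the predicate above.  The opcode
   values are illustrative.  */
#if 0
static void
example_thumb_restores_sp (void)
{
  gdb_assert (thumb_instruction_restores_sp (0x46bd));   /* mov sp, r7 */
  gdb_assert (thumb_instruction_restores_sp (0xb008));   /* add sp, #32 */
  gdb_assert (thumb_instruction_restores_sp (0xbd10));   /* pop {r4, pc} */
  gdb_assert (!thumb_instruction_restores_sp (0xb508));  /* push {r3, lr} */
}
#endif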
636 /* Analyze a Thumb prologue, looking for a recognizable stack frame
637 and frame pointer. Scan until we encounter a store that could
638 clobber the stack frame unexpectedly, or an unknown instruction.
639 Return the last address which is definitely safe to skip for an
640 initial breakpoint. */
642 static CORE_ADDR
643 thumb_analyze_prologue (struct gdbarch *gdbarch,
644 CORE_ADDR start, CORE_ADDR limit,
645 struct arm_prologue_cache *cache)
647 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
648 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
649 int i;
650 pv_t regs[16];
651 CORE_ADDR offset;
652 CORE_ADDR unrecognized_pc = 0;
654 for (i = 0; i < 16; i++)
655 regs[i] = pv_register (i, 0);
656 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
658 while (start < limit)
660 unsigned short insn;
662 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
664 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
666 int regno;
667 int mask;
669 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
670 break;
672 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
673 whether to save LR (R14). */
674 mask = (insn & 0xff) | ((insn & 0x100) << 6);
676 /* Calculate offsets of saved R0-R7 and LR. */
677 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
678 if (mask & (1 << regno))
680 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
681 -4);
682 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
685 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
687 offset = (insn & 0x7f) << 2; /* get scaled offset */
688 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
689 -offset);
691 else if (thumb_instruction_restores_sp (insn))
693 /* Don't scan past the epilogue. */
694 break;
696 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
697 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
698 (insn & 0xff) << 2);
699 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
700 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
701 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
702 bits (insn, 6, 8));
703 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
704 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
705 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
706 bits (insn, 0, 7));
707 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
708 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
709 && pv_is_constant (regs[bits (insn, 3, 5)]))
710 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
711 regs[bits (insn, 6, 8)]);
712 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
713 && pv_is_constant (regs[bits (insn, 3, 6)]))
715 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
716 int rm = bits (insn, 3, 6);
717 regs[rd] = pv_add (regs[rd], regs[rm]);
719 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
721 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
722 int src_reg = (insn & 0x78) >> 3;
723 regs[dst_reg] = regs[src_reg];
725 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
727 /* Handle stores to the stack. Normally pushes are used,
728 but with GCC -mtpcs-frame, there may be other stores
729 in the prologue to create the frame. */
730 int regno = (insn >> 8) & 0x7;
731 pv_t addr;
733 offset = (insn & 0xff) << 2;
734 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
736 if (stack.store_would_trash (addr))
737 break;
739 stack.store (addr, 4, regs[regno]);
741 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
743 int rd = bits (insn, 0, 2);
744 int rn = bits (insn, 3, 5);
745 pv_t addr;
747 offset = bits (insn, 6, 10) << 2;
748 addr = pv_add_constant (regs[rn], offset);
750 if (stack.store_would_trash (addr))
751 break;
753 stack.store (addr, 4, regs[rd]);
755 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
756 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
757 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
758 /* Ignore stores of argument registers to the stack. */
760 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
761 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
762 /* Ignore block loads from the stack, potentially copying
763 parameters from memory. */
765 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
766 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
767 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
768 /* Similarly ignore single loads from the stack. */
770 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
771 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
772 /* Skip register copies, i.e. saves to another register
773 instead of the stack. */
775 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
776 /* Recognize constant loads; even with small stacks these are necessary
777 on Thumb. */
778 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
779 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
781 /* Constant pool loads, for the same reason. */
782 unsigned int constant;
783 CORE_ADDR loc;
785 loc = start + 4 + bits (insn, 0, 7) * 4;
786 constant = read_memory_unsigned_integer (loc, 4, byte_order);
787 regs[bits (insn, 8, 10)] = pv_constant (constant);
789 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
791 unsigned short inst2;
793 inst2 = read_code_unsigned_integer (start + 2, 2,
794 byte_order_for_code);
796 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
798 /* BL, BLX. Allow some special function calls when
799 skipping the prologue; GCC generates these before
800 storing arguments to the stack. */
801 CORE_ADDR nextpc;
802 int j1, j2, imm1, imm2;
804 imm1 = sbits (insn, 0, 10);
805 imm2 = bits (inst2, 0, 10);
806 j1 = bit (inst2, 13);
807 j2 = bit (inst2, 11);
809 offset = ((imm1 << 12) + (imm2 << 1));
810 offset ^= ((!j2) << 22) | ((!j1) << 23);
812 nextpc = start + 4 + offset;
813 /* For BLX make sure to clear the low bits. */
814 if (bit (inst2, 12) == 0)
815 nextpc = nextpc & 0xfffffffc;
817 if (!skip_prologue_function (gdbarch, nextpc,
818 bit (inst2, 12) != 0))
819 break;
822 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
823 { registers } */
824 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
826 pv_t addr = regs[bits (insn, 0, 3)];
827 int regno;
829 if (stack.store_would_trash (addr))
830 break;
832 /* Calculate offsets of saved registers. */
833 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
834 if (inst2 & (1 << regno))
836 addr = pv_add_constant (addr, -4);
837 stack.store (addr, 4, regs[regno]);
840 if (insn & 0x0020)
841 regs[bits (insn, 0, 3)] = addr;
844 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
845 [Rn, #+/-imm]{!} */
846 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
848 int regno1 = bits (inst2, 12, 15);
849 int regno2 = bits (inst2, 8, 11);
850 pv_t addr = regs[bits (insn, 0, 3)];
852 offset = inst2 & 0xff;
853 if (insn & 0x0080)
854 addr = pv_add_constant (addr, offset);
855 else
856 addr = pv_add_constant (addr, -offset);
858 if (stack.store_would_trash (addr))
859 break;
861 stack.store (addr, 4, regs[regno1]);
862 stack.store (pv_add_constant (addr, 4),
863 4, regs[regno2]);
865 if (insn & 0x0020)
866 regs[bits (insn, 0, 3)] = addr;
869 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
870 && (inst2 & 0x0c00) == 0x0c00
871 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
873 int regno = bits (inst2, 12, 15);
874 pv_t addr = regs[bits (insn, 0, 3)];
876 offset = inst2 & 0xff;
877 if (inst2 & 0x0200)
878 addr = pv_add_constant (addr, offset);
879 else
880 addr = pv_add_constant (addr, -offset);
882 if (stack.store_would_trash (addr))
883 break;
885 stack.store (addr, 4, regs[regno]);
887 if (inst2 & 0x0100)
888 regs[bits (insn, 0, 3)] = addr;
891 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
892 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
894 int regno = bits (inst2, 12, 15);
895 pv_t addr;
897 offset = inst2 & 0xfff;
898 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
900 if (stack.store_would_trash (addr))
901 break;
903 stack.store (addr, 4, regs[regno]);
906 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
907 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
908 /* Ignore stores of argument registers to the stack. */
911 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
912 && (inst2 & 0x0d00) == 0x0c00
913 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
914 /* Ignore stores of argument registers to the stack. */
917 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
918 { registers } */
919 && (inst2 & 0x8000) == 0x0000
920 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
921 /* Ignore block loads from the stack, potentially copying
922 parameters from memory. */
925 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
926 [Rn, #+/-imm] */
927 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
928 /* Similarly ignore dual loads from the stack. */
931 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
932 && (inst2 & 0x0d00) == 0x0c00
933 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
934 /* Similarly ignore single loads from the stack. */
937 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
938 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
939 /* Similarly ignore single loads from the stack. */
942 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
943 && (inst2 & 0x8000) == 0x0000)
945 unsigned int imm = ((bits (insn, 10, 10) << 11)
946 | (bits (inst2, 12, 14) << 8)
947 | bits (inst2, 0, 7));
949 regs[bits (inst2, 8, 11)]
950 = pv_add_constant (regs[bits (insn, 0, 3)],
951 thumb_expand_immediate (imm));
954 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
955 && (inst2 & 0x8000) == 0x0000)
957 unsigned int imm = ((bits (insn, 10, 10) << 11)
958 | (bits (inst2, 12, 14) << 8)
959 | bits (inst2, 0, 7));
961 regs[bits (inst2, 8, 11)]
962 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
965 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
966 && (inst2 & 0x8000) == 0x0000)
968 unsigned int imm = ((bits (insn, 10, 10) << 11)
969 | (bits (inst2, 12, 14) << 8)
970 | bits (inst2, 0, 7));
972 regs[bits (inst2, 8, 11)]
973 = pv_add_constant (regs[bits (insn, 0, 3)],
974 - (CORE_ADDR) thumb_expand_immediate (imm));
977 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
978 && (inst2 & 0x8000) == 0x0000)
980 unsigned int imm = ((bits (insn, 10, 10) << 11)
981 | (bits (inst2, 12, 14) << 8)
982 | bits (inst2, 0, 7));
984 regs[bits (inst2, 8, 11)]
985 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
988 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
990 unsigned int imm = ((bits (insn, 10, 10) << 11)
991 | (bits (inst2, 12, 14) << 8)
992 | bits (inst2, 0, 7));
994 regs[bits (inst2, 8, 11)]
995 = pv_constant (thumb_expand_immediate (imm));
998 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1000 unsigned int imm
1001 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1003 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1006 else if (insn == 0xea5f /* mov.w Rd,Rm */
1007 && (inst2 & 0xf0f0) == 0)
1009 int dst_reg = (inst2 & 0x0f00) >> 8;
1010 int src_reg = inst2 & 0xf;
1011 regs[dst_reg] = regs[src_reg];
1014 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1016 /* Constant pool loads. */
1017 unsigned int constant;
1018 CORE_ADDR loc;
1020 offset = bits (inst2, 0, 11);
1021 if (insn & 0x0080)
1022 loc = start + 4 + offset;
1023 else
1024 loc = start + 4 - offset;
1026 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1027 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1030 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1032 /* Constant pool loads. */
1033 unsigned int constant;
1034 CORE_ADDR loc;
1036 offset = bits (inst2, 0, 7) << 2;
1037 if (insn & 0x0080)
1038 loc = start + 4 + offset;
1039 else
1040 loc = start + 4 - offset;
1042 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1043 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1045 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1046 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1049 else if (thumb2_instruction_changes_pc (insn, inst2))
1051 /* Don't scan past anything that might change control flow. */
1052 break;
1054 else
1056 /* The optimizer might shove anything into the prologue,
1057 so we just skip what we don't recognize. */
1058 unrecognized_pc = start;
1061 start += 2;
1063 else if (thumb_instruction_changes_pc (insn))
1065 /* Don't scan past anything that might change control flow. */
1066 break;
1068 else
1070 /* The optimizer might shove anything into the prologue,
1071 so we just skip what we don't recognize. */
1072 unrecognized_pc = start;
1075 start += 2;
1078 if (arm_debug)
1079 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1080 paddress (gdbarch, start));
1082 if (unrecognized_pc == 0)
1083 unrecognized_pc = start;
1085 if (cache == NULL)
1086 return unrecognized_pc;
1088 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1090 /* Frame pointer is fp. Frame size is constant. */
1091 cache->framereg = ARM_FP_REGNUM;
1092 cache->framesize = -regs[ARM_FP_REGNUM].k;
1094 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1096 /* Frame pointer is r7. Frame size is constant. */
1097 cache->framereg = THUMB_FP_REGNUM;
1098 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1100 else
1102 /* Try the stack pointer... this is a bit desperate. */
1103 cache->framereg = ARM_SP_REGNUM;
1104 cache->framesize = -regs[ARM_SP_REGNUM].k;
1107 for (i = 0; i < 16; i++)
1108 if (stack.find_reg (gdbarch, i, &offset))
1109 cache->saved_regs[i].addr = offset;
1111 return unrecognized_pc;
1115 /* Try to analyze the instructions starting from PC, which load the symbol
1116 __stack_chk_guard. Return the address of the instruction following the ones
1117 that load this symbol, set the destination register number in *DESTREG, and
1118 set the size in bytes of those loading instructions in *OFFSET. Return 0 if
1119 the instructions are not recognized. */
1121 static CORE_ADDR
1122 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1123 unsigned int *destreg, int *offset)
1125 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1126 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1127 unsigned int low, high, address;
1129 address = 0;
1130 if (is_thumb)
1132 unsigned short insn1
1133 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1135 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1137 *destreg = bits (insn1, 8, 10);
1138 *offset = 2;
1139 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1140 address = read_memory_unsigned_integer (address, 4,
1141 byte_order_for_code);
1143 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1145 unsigned short insn2
1146 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1148 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1150 insn1
1151 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1152 insn2
1153 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1155 /* movt Rd, #const */
1156 if ((insn1 & 0xfbc0) == 0xf2c0)
1158 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1159 *destreg = bits (insn2, 8, 11);
1160 *offset = 8;
1161 address = (high << 16 | low);
1165 else
1167 unsigned int insn
1168 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1170 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1172 address = bits (insn, 0, 11) + pc + 8;
1173 address = read_memory_unsigned_integer (address, 4,
1174 byte_order_for_code);
1176 *destreg = bits (insn, 12, 15);
1177 *offset = 4;
1179 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1181 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1183 insn
1184 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1186 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1188 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1189 *destreg = bits (insn, 12, 15);
1190 *offset = 8;
1191 address = (high << 16 | low);
1196 return address;
1199 /* Try to skip the sequence of instructions used for the stack protector. If
1200 PC points to the first instruction of this sequence, return the address of
1201 the first instruction after this sequence; otherwise, return the original PC.
1203 On ARM, this sequence of instructions is composed of three main steps:
1204 Step 1: load symbol __stack_chk_guard,
1205 Step 2: load from address of __stack_chk_guard,
1206 Step 3: store it to somewhere else.
1208 Usually, the instructions in steps 2 and 3 are the same across ARM
1209 architectures: step 2 is the single instruction 'ldr Rx, [Rn, #0]', and
1210 step 3 is the single instruction 'str Rx, [r7, #immd]'. However, the
1211 instructions in step 1 vary between ARM architectures. On ARMv7,
1212 they are:
1214 movw Rn, #:lower16:__stack_chk_guard
1215 movt Rn, #:upper16:__stack_chk_guard
1217 On ARMv5t, it is,
1219 ldr Rn, .Label
1220 ....
1221 .Label:
1222 .word __stack_chk_guard
1224 Since ldr/str are very common instructions, we can't use them on their own
1225 as the 'fingerprint' or 'signature' of the stack protector sequence. Here we
1226 choose the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard,
1227 if not stripped, as the 'fingerprint' of a stack protector code sequence. */
1229 static CORE_ADDR
1230 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1232 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1233 unsigned int basereg;
1234 struct bound_minimal_symbol stack_chk_guard;
1235 int offset;
1236 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1237 CORE_ADDR addr;
1239 /* Try to parse the instructions in Step 1. */
1240 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1241 &basereg, &offset);
1242 if (!addr)
1243 return pc;
1245 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1246 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1247 Otherwise, this sequence cannot be for the stack protector. */
1248 if (stack_chk_guard.minsym == NULL
1249 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1250 return pc;
1252 if (is_thumb)
1254 unsigned int destreg;
1255 unsigned short insn
1256 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1258 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1259 if ((insn & 0xf800) != 0x6800)
1260 return pc;
1261 if (bits (insn, 3, 5) != basereg)
1262 return pc;
1263 destreg = bits (insn, 0, 2);
1265 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1266 byte_order_for_code);
1267 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1268 if ((insn & 0xf800) != 0x6000)
1269 return pc;
1270 if (destreg != bits (insn, 0, 2))
1271 return pc;
1273 else
1275 unsigned int destreg;
1276 unsigned int insn
1277 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1279 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1280 if ((insn & 0x0e500000) != 0x04100000)
1281 return pc;
1282 if (bits (insn, 16, 19) != basereg)
1283 return pc;
1284 destreg = bits (insn, 12, 15);
1285 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1286 insn = read_code_unsigned_integer (pc + offset + 4,
1287 4, byte_order_for_code);
1288 if ((insn & 0x0e500000) != 0x04000000)
1289 return pc;
1290 if (bits (insn, 12, 15) != destreg)
1291 return pc;
1293 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
1294 and 8 bytes on ARM. */
1295 if (is_thumb)
1296 return pc + offset + 4;
1297 else
1298 return pc + offset + 8;
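/* Editor's note (not part of the original file): an illustrative ARMv7
   sequence that the function above is meant to recognize, assuming the
   compiler picked r3 as the scratch register and stores the guard into the
   current frame:

       movw    r3, #:lower16:__stack_chk_guard
       movt    r3, #:upper16:__stack_chk_guard
       ldr     r3, [r3]
       str     r3, [r7, #imm]

   When PC points at the movw, arm_skip_stack_protector returns the address
   of the instruction following the str; otherwise it returns PC unchanged.  */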
1301 /* Advance the PC across any function entry prologue instructions to
1302 reach some "real" code.
1304 The APCS (ARM Procedure Call Standard) defines the following
1305 prologue:
1307 mov ip, sp
1308 [stmfd sp!, {a1,a2,a3,a4}]
1309 stmfd sp!, {...,fp,ip,lr,pc}
1310 [stfe f7, [sp, #-12]!]
1311 [stfe f6, [sp, #-12]!]
1312 [stfe f5, [sp, #-12]!]
1313 [stfe f4, [sp, #-12]!]
1314 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1316 static CORE_ADDR
1317 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1319 CORE_ADDR func_addr, limit_pc;
1321 /* See if we can determine the end of the prologue via the symbol table.
1322 If so, then return either PC, or the PC after the prologue, whichever
1323 is greater. */
1324 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1326 CORE_ADDR post_prologue_pc
1327 = skip_prologue_using_sal (gdbarch, func_addr);
1328 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1330 if (post_prologue_pc)
1331 post_prologue_pc
1332 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1335 /* GCC always emits a line note before the prologue and another
1336 one after, even if the two are at the same address or on the
1337 same line. Take advantage of this so that we do not need to
1338 know every instruction that might appear in the prologue. We
1339 will have producer information for most binaries; if it is
1340 missing (e.g. for -gstabs), assume the GNU tools. */
1341 if (post_prologue_pc
1342 && (cust == NULL
1343 || COMPUNIT_PRODUCER (cust) == NULL
1344 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1345 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1346 return post_prologue_pc;
1348 if (post_prologue_pc != 0)
1350 CORE_ADDR analyzed_limit;
1352 /* For non-GCC compilers, make sure the entire line is an
1353 acceptable prologue; GDB will round this function's
1354 return value up to the end of the following line so we
1355 can not skip just part of a line (and we do not want to).
1357 RealView does not treat the prologue specially, but does
1358 associate prologue code with the opening brace; so this
1359 lets us skip the first line if we think it is the opening
1360 brace. */
1361 if (arm_pc_is_thumb (gdbarch, func_addr))
1362 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1363 post_prologue_pc, NULL);
1364 else
1365 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1366 post_prologue_pc, NULL);
1368 if (analyzed_limit != post_prologue_pc)
1369 return func_addr;
1371 return post_prologue_pc;
1375 /* Can't determine prologue from the symbol table, need to examine
1376 instructions. */
1378 /* Find an upper limit on the function prologue using the debug
1379 information. If the debug information could not be used to provide
1380 that bound, then use an arbitrarily large number as the upper bound. */
1381 /* Like arm_scan_prologue, stop no later than pc + 64. */
1382 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1383 if (limit_pc == 0)
1384 limit_pc = pc + 64; /* Magic. */
1387 /* Check if this is Thumb code. */
1388 if (arm_pc_is_thumb (gdbarch, pc))
1389 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1390 else
1391 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1394 /* *INDENT-OFF* */
1395 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1396 This function decodes a Thumb function prologue to determine:
1397 1) the size of the stack frame
1398 2) which registers are saved on it
1399 3) the offsets of saved regs
1400 4) the offset from the stack pointer to the frame pointer
1402 A typical Thumb function prologue would create this stack frame
1403 (offsets relative to FP)
1404 old SP -> 24 stack parameters
1405 20 LR
1406 16 R7
1407 R7 -> 0 local variables (16 bytes)
1408 SP -> -12 additional stack space (12 bytes)
1409 The frame size would thus be 36 bytes, and the frame offset would be
1410 12 bytes. The frame register is R7.
1412 The comments for thumb_skip_prolog() describe the algorithm we use
1413 to detect the end of the prologue. */
1414 /* *INDENT-ON* */
1416 static void
1417 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1418 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1420 CORE_ADDR prologue_start;
1421 CORE_ADDR prologue_end;
1423 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1424 &prologue_end))
1426 /* See comment in arm_scan_prologue for an explanation of
1427 this heuristic. */
1428 if (prologue_end > prologue_start + 64)
1430 prologue_end = prologue_start + 64;
1433 else
1434 /* We're in the boondocks: we have no idea where the start of the
1435 function is. */
1436 return;
1438 prologue_end = std::min (prologue_end, prev_pc);
1440 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1443 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1444 otherwise. */
1446 static int
1447 arm_instruction_restores_sp (unsigned int insn)
1449 if (bits (insn, 28, 31) != INST_NV)
1451 if ((insn & 0x0df0f000) == 0x0080d000
1452 /* ADD SP (register or immediate). */
1453 || (insn & 0x0df0f000) == 0x0040d000
1454 /* SUB SP (register or immediate). */
1455 || (insn & 0x0ffffff0) == 0x01a0d000
1456 /* MOV SP. */
1457 || (insn & 0x0fff0000) == 0x08bd0000
1458 /* POP (LDMIA). */
1459 || (insn & 0x0fff0000) == 0x049d0000)
1460 /* POP of a single register. */
1461 return 1;
1464 return 0;
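/* Editor's sketch (not part of the original file): sample A32 encodings
   matched (and not matched) by the predicate above; the opcode values are
   illustrative.  */
#if 0
static void
example_arm_restores_sp (void)
{
  gdb_assert (arm_instruction_restores_sp (0xe8bd8010));   /* pop {r4, pc} */
  gdb_assert (arm_instruction_restores_sp (0xe1a0d00b));   /* mov sp, fp */
  gdb_assert (!arm_instruction_restores_sp (0xe92d4800));  /* push {fp, lr} */
}
#endif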
1467 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1468 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1469 fill it in. Return the first address not recognized as a prologue
1470 instruction.
1472 We recognize all the instructions typically found in ARM prologues,
1473 plus harmless instructions which can be skipped (either for analysis
1474 purposes, or a more restrictive set that can be skipped when finding
1475 the end of the prologue). */
1477 static CORE_ADDR
1478 arm_analyze_prologue (struct gdbarch *gdbarch,
1479 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1480 struct arm_prologue_cache *cache)
1482 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1483 int regno;
1484 CORE_ADDR offset, current_pc;
1485 pv_t regs[ARM_FPS_REGNUM];
1486 CORE_ADDR unrecognized_pc = 0;
1488 /* Search the prologue looking for instructions that set up the
1489 frame pointer, adjust the stack pointer, and save registers.
1491 Be careful, however, and if it doesn't look like a prologue,
1492 don't try to scan it. If, for instance, a frameless function
1493 begins with stmfd sp!, then we will tell ourselves there is
1494 a frame, which will confuse stack traceback, as well as "finish"
1495 and other operations that rely on a knowledge of the stack
1496 traceback. */
1498 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1499 regs[regno] = pv_register (regno, 0);
1500 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1502 for (current_pc = prologue_start;
1503 current_pc < prologue_end;
1504 current_pc += 4)
1506 unsigned int insn
1507 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1509 if (insn == 0xe1a0c00d) /* mov ip, sp */
1511 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1512 continue;
1514 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1515 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1517 unsigned imm = insn & 0xff; /* immediate value */
1518 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1519 int rd = bits (insn, 12, 15);
1520 imm = (imm >> rot) | (imm << (32 - rot));
1521 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1522 continue;
1524 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1525 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1527 unsigned imm = insn & 0xff; /* immediate value */
1528 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1529 int rd = bits (insn, 12, 15);
1530 imm = (imm >> rot) | (imm << (32 - rot));
1531 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1532 continue;
1534 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1535 [sp, #-4]! */
1537 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1538 break;
1539 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1540 stack.store (regs[ARM_SP_REGNUM], 4,
1541 regs[bits (insn, 12, 15)]);
1542 continue;
1544 else if ((insn & 0xffff0000) == 0xe92d0000)
1545 /* stmfd sp!, {..., fp, ip, lr, pc}
1547 stmfd sp!, {a1, a2, a3, a4} */
1549 int mask = insn & 0xffff;
1551 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1552 break;
1554 /* Calculate offsets of saved registers. */
1555 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1556 if (mask & (1 << regno))
1558 regs[ARM_SP_REGNUM]
1559 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1560 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1563 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1564 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1565 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1567 /* No need to add this to saved_regs -- it's just an arg reg. */
1568 continue;
1570 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1571 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1572 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1574 /* No need to add this to saved_regs -- it's just an arg reg. */
1575 continue;
1577 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1578 { registers } */
1579 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1581 /* No need to add this to saved_regs -- it's just arg regs. */
1582 continue;
1584 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1586 unsigned imm = insn & 0xff; /* immediate value */
1587 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1588 imm = (imm >> rot) | (imm << (32 - rot));
1589 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1591 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1593 unsigned imm = insn & 0xff; /* immediate value */
1594 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1595 imm = (imm >> rot) | (imm << (32 - rot));
1596 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1598 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1599 [sp, -#c]! */
1600 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1602 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1603 break;
1605 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1606 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1607 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1609 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1610 [sp!] */
1611 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1613 int n_saved_fp_regs;
1614 unsigned int fp_start_reg, fp_bound_reg;
1616 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1617 break;
1619 if ((insn & 0x800) == 0x800) /* N0 is set */
1621 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1622 n_saved_fp_regs = 3;
1623 else
1624 n_saved_fp_regs = 1;
1626 else
1628 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1629 n_saved_fp_regs = 2;
1630 else
1631 n_saved_fp_regs = 4;
1634 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1635 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1636 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1638 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1639 stack.store (regs[ARM_SP_REGNUM], 12,
1640 regs[fp_start_reg++]);
1643 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1645 /* Allow some special function calls when skipping the
1646 prologue; GCC generates these before storing arguments to
1647 the stack. */
1648 CORE_ADDR dest = BranchDest (current_pc, insn);
1650 if (skip_prologue_function (gdbarch, dest, 0))
1651 continue;
1652 else
1653 break;
1655 else if ((insn & 0xf0000000) != 0xe0000000)
1656 break; /* Condition not true, exit early. */
1657 else if (arm_instruction_changes_pc (insn))
1658 /* Don't scan past anything that might change control flow. */
1659 break;
1660 else if (arm_instruction_restores_sp (insn))
1662 /* Don't scan past the epilogue. */
1663 break;
1665 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1666 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1667 /* Ignore block loads from the stack, potentially copying
1668 parameters from memory. */
1669 continue;
1670 else if ((insn & 0xfc500000) == 0xe4100000
1671 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1672 /* Similarly ignore single loads from the stack. */
1673 continue;
1674 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1675 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1676 register instead of the stack. */
1677 continue;
1678 else
1680 /* The optimizer might shove anything into the prologue. If we
1681 are building up the cache (cache != NULL) from scanning the
1682 prologue, we just skip what we don't recognize and scan further
1683 to make the cache as complete as possible. However, if we are
1684 skipping the prologue, we stop immediately on an unrecognized
1685 instruction. */
1686 unrecognized_pc = current_pc;
1687 if (cache != NULL)
1688 continue;
1689 else
1690 break;
1694 if (unrecognized_pc == 0)
1695 unrecognized_pc = current_pc;
1697 if (cache)
1699 int framereg, framesize;
1701 /* The frame size is just the distance from the frame register
1702 to the original stack pointer. */
1703 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1705 /* Frame pointer is fp. */
1706 framereg = ARM_FP_REGNUM;
1707 framesize = -regs[ARM_FP_REGNUM].k;
1709 else
1711 /* Try the stack pointer... this is a bit desperate. */
1712 framereg = ARM_SP_REGNUM;
1713 framesize = -regs[ARM_SP_REGNUM].k;
1716 cache->framereg = framereg;
1717 cache->framesize = framesize;
1719 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1720 if (stack.find_reg (gdbarch, regno, &offset))
1721 cache->saved_regs[regno].addr = offset;
1724 if (arm_debug)
1725 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1726 paddress (gdbarch, unrecognized_pc));
1728 return unrecognized_pc;
1731 static void
1732 arm_scan_prologue (struct frame_info *this_frame,
1733 struct arm_prologue_cache *cache)
1735 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1736 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1737 CORE_ADDR prologue_start, prologue_end;
1738 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1739 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1741 /* Assume there is no frame until proven otherwise. */
1742 cache->framereg = ARM_SP_REGNUM;
1743 cache->framesize = 0;
1745 /* Check for Thumb prologue. */
1746 if (arm_frame_is_thumb (this_frame))
1748 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1749 return;
1752 /* Find the function prologue. If we can't find the function in
1753 the symbol table, peek in the stack frame to find the PC. */
1754 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1755 &prologue_end))
1757 /* One way to find the end of the prologue (which works well
1758 for unoptimized code) is to do the following:
1760 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1762 if (sal.line == 0)
1763 prologue_end = prev_pc;
1764 else if (sal.end < prologue_end)
1765 prologue_end = sal.end;
1767 This mechanism is very accurate so long as the optimizer
1768 doesn't move any instructions from the function body into the
1769 prologue. If this happens, sal.end will be the last
1770 instruction in the first hunk of prologue code just before
1771 the first instruction that the scheduler has moved from
1772 the body to the prologue.
1774 In order to make sure that we scan all of the prologue
1775 instructions, we use a slightly less accurate mechanism which
1776 may scan more than necessary. To help compensate for this
1777 lack of accuracy, the prologue scanning loop below contains
1778 several clauses which'll cause the loop to terminate early if
1779 an implausible prologue instruction is encountered.
1781 The expression
1783 prologue_start + 64
1785 is a suitable endpoint since it accounts for the largest
1786 possible prologue plus up to five instructions inserted by
1787 the scheduler. */
1789 if (prologue_end > prologue_start + 64)
1791 prologue_end = prologue_start + 64; /* See above. */
1794 else
1796 /* We have no symbol information. Our only option is to assume this
1797 function has a standard stack frame and the normal frame register.
1798 Then, we can find the value of our frame pointer on entrance to
1799 the callee (or at the present moment if this is the innermost frame).
1800 The value stored there should be the address of the stmfd + 8. */
1801 CORE_ADDR frame_loc;
1802 ULONGEST return_value;
1804 /* AAPCS does not use a frame register, so we can abort here. */
1805 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
1806 return;
1808 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1809 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1810 &return_value))
1811 return;
1812 else
1814 prologue_start = gdbarch_addr_bits_remove
1815 (gdbarch, return_value) - 8;
1816 prologue_end = prologue_start + 64; /* See above. */
1820 if (prev_pc < prologue_end)
1821 prologue_end = prev_pc;
1823 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1826 static struct arm_prologue_cache *
1827 arm_make_prologue_cache (struct frame_info *this_frame)
1829 int reg;
1830 struct arm_prologue_cache *cache;
1831 CORE_ADDR unwound_fp;
1833 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1834 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1836 arm_scan_prologue (this_frame, cache);
1838 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1839 if (unwound_fp == 0)
1840 return cache;
1842 cache->prev_sp = unwound_fp + cache->framesize;
1844 /* Calculate actual addresses of saved registers using offsets
1845 determined by arm_scan_prologue. */
1846 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1847 if (trad_frame_addr_p (cache->saved_regs, reg))
1848 cache->saved_regs[reg].addr += cache->prev_sp;
1850 return cache;
1853 /* Implementation of the stop_reason hook for arm_prologue frames. */
1855 static enum unwind_stop_reason
1856 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1857 void **this_cache)
1859 struct arm_prologue_cache *cache;
1860 CORE_ADDR pc;
1862 if (*this_cache == NULL)
1863 *this_cache = arm_make_prologue_cache (this_frame);
1864 cache = (struct arm_prologue_cache *) *this_cache;
1866 /* This is meant to halt the backtrace at "_start". */
1867 pc = get_frame_pc (this_frame);
1868 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1869 return UNWIND_OUTERMOST;
1871 /* If we've hit a wall, stop. */
1872 if (cache->prev_sp == 0)
1873 return UNWIND_OUTERMOST;
1875 return UNWIND_NO_REASON;
1878 /* Our frame ID for a normal frame is the current function's starting PC
1879 and the caller's SP when we were called. */
1881 static void
1882 arm_prologue_this_id (struct frame_info *this_frame,
1883 void **this_cache,
1884 struct frame_id *this_id)
1886 struct arm_prologue_cache *cache;
1887 struct frame_id id;
1888 CORE_ADDR pc, func;
1890 if (*this_cache == NULL)
1891 *this_cache = arm_make_prologue_cache (this_frame);
1892 cache = (struct arm_prologue_cache *) *this_cache;
1894 /* Use function start address as part of the frame ID. If we cannot
1895 identify the start address (due to missing symbol information),
1896 fall back to just using the current PC. */
1897 pc = get_frame_pc (this_frame);
1898 func = get_frame_func (this_frame);
1899 if (!func)
1900 func = pc;
1902 id = frame_id_build (cache->prev_sp, func);
1903 *this_id = id;
1906 static struct value *
1907 arm_prologue_prev_register (struct frame_info *this_frame,
1908 void **this_cache,
1909 int prev_regnum)
1911 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1912 struct arm_prologue_cache *cache;
1914 if (*this_cache == NULL)
1915 *this_cache = arm_make_prologue_cache (this_frame);
1916 cache = (struct arm_prologue_cache *) *this_cache;
1918 /* If we are asked to unwind the PC, then we need to return the LR
1919 instead. The prologue may save PC, but it will point into this
1920 frame's prologue, not the next frame's resume location. Also
1921 strip the saved T bit. A valid LR may have the low bit set, but
1922 a valid PC never does. */
1923 if (prev_regnum == ARM_PC_REGNUM)
1925 CORE_ADDR lr;
1927 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1928 return frame_unwind_got_constant (this_frame, prev_regnum,
1929 arm_addr_bits_remove (gdbarch, lr));
1932 /* SP is generally not saved to the stack, but this frame is
1933 identified by the next frame's stack pointer at the time of the call.
1934 The value was already reconstructed into PREV_SP. */
1935 if (prev_regnum == ARM_SP_REGNUM)
1936 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1938 /* The CPSR may have been changed by the call instruction and by the
1939 called function. The only bit we can reconstruct is the T bit,
1940 by checking the low bit of LR as of the call. This is a reliable
1941 indicator of Thumb-ness except for some ARM v4T pre-interworking
1942 Thumb code, which could get away with a clear low bit as long as
1943 the called function did not use bx. Guess that all other
1944 bits are unchanged; the condition flags are presumably lost,
1945 but the processor status is likely valid. */
1946 if (prev_regnum == ARM_PS_REGNUM)
1948 CORE_ADDR lr, cpsr;
1949 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1951 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1952 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1953 if (IS_THUMB_ADDR (lr))
1954 cpsr |= t_bit;
1955 else
1956 cpsr &= ~t_bit;
1957 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1960 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1961 prev_regnum);
1964 struct frame_unwind arm_prologue_unwind = {
1965 NORMAL_FRAME,
1966 arm_prologue_unwind_stop_reason,
1967 arm_prologue_this_id,
1968 arm_prologue_prev_register,
1969 NULL,
1970 default_frame_sniffer
1973 /* Maintain a list of ARM exception table entries per objfile, similar to the
1974 list of mapping symbols. We only cache entries for standard ARM-defined
1975 personality routines; the cache will contain only the frame unwinding
1976 instructions associated with the entry (not the descriptors). */
1978 static const struct objfile_data *arm_exidx_data_key;
1980 struct arm_exidx_entry
1982 bfd_vma addr;
1983 gdb_byte *entry;
1985 typedef struct arm_exidx_entry arm_exidx_entry_s;
1986 DEF_VEC_O(arm_exidx_entry_s);
1988 struct arm_exidx_data
1990 VEC(arm_exidx_entry_s) **section_maps;
1993 static void
1994 arm_exidx_data_free (struct objfile *objfile, void *arg)
1996 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
1997 unsigned int i;
1999 for (i = 0; i < objfile->obfd->section_count; i++)
2000 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2003 static inline int
2004 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2005 const struct arm_exidx_entry *rhs)
2007 return lhs->addr < rhs->addr;
2010 static struct obj_section *
2011 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2013 struct obj_section *osect;
2015 ALL_OBJFILE_OSECTIONS (objfile, osect)
2016 if (bfd_get_section_flags (objfile->obfd,
2017 osect->the_bfd_section) & SEC_ALLOC)
2019 bfd_vma start, size;
2020 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2021 size = bfd_get_section_size (osect->the_bfd_section);
2023 if (start <= vma && vma < start + size)
2024 return osect;
2027 return NULL;
2030 /* Parse contents of exception table and exception index sections
2031 of OBJFILE, and fill in the exception table entry cache.
2033 For each entry that refers to a standard ARM-defined personality
2034 routine, extract the frame unwinding instructions (from either
2035 the index or the table section). The unwinding instructions
2036 are normalized by:
2037 - extracting them from the rest of the table data
2038 - converting to host endianness
2039 - appending the implicit 0xb0 ("Finish") code
2041 The extracted and normalized instructions are stored for later
2042 retrieval by the arm_find_exidx_entry routine. */
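/* For orientation (a summary of the layout the code below decodes):
   each .ARM.exidx entry is a pair of 32-bit words.  The first word is
   a prel31 (31-bit place-relative) offset to the function covered.
   The second word is either the special value 1 (EXIDX_CANTUNWIND),
   an inline short-form entry (top byte 0x80) carrying up to three
   unwind bytes, or a prel31 offset to a longer entry in .ARM.extab.  */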
2044 static void
2045 arm_exidx_new_objfile (struct objfile *objfile)
2047 struct arm_exidx_data *data;
2048 asection *exidx, *extab;
2049 bfd_vma exidx_vma = 0, extab_vma = 0;
2050 LONGEST i;
2052 /* If we've already touched this file, do nothing. */
2053 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2054 return;
2056 /* Read contents of exception table and index. */
2057 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2058 gdb::byte_vector exidx_data;
2059 if (exidx)
2061 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2062 exidx_data.resize (bfd_get_section_size (exidx));
2064 if (!bfd_get_section_contents (objfile->obfd, exidx,
2065 exidx_data.data (), 0,
2066 exidx_data.size ()))
2067 return;
2070 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2071 gdb::byte_vector extab_data;
2072 if (extab)
2074 extab_vma = bfd_section_vma (objfile->obfd, extab);
2075 extab_data.resize (bfd_get_section_size (extab));
2077 if (!bfd_get_section_contents (objfile->obfd, extab,
2078 extab_data.data (), 0,
2079 extab_data.size ()))
2080 return;
2083 /* Allocate exception table data structure. */
2084 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2085 set_objfile_data (objfile, arm_exidx_data_key, data);
2086 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2087 objfile->obfd->section_count,
2088 VEC(arm_exidx_entry_s) *);
2090 /* Fill in exception table. */
2091 for (i = 0; i < exidx_data.size () / 8; i++)
2093 struct arm_exidx_entry new_exidx_entry;
2094 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2095 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2096 exidx_data.data () + i * 8 + 4);
2097 bfd_vma addr = 0, word = 0;
2098 int n_bytes = 0, n_words = 0;
2099 struct obj_section *sec;
2100 gdb_byte *entry = NULL;
2102 /* Extract address of start of function. */
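/* The first word is a prel31 value: sign-extend it from 31 bits and
   add the address of the word itself to obtain the absolute function
   address.  */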
2103 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2104 idx += exidx_vma + i * 8;
2106 /* Find section containing function and compute section offset. */
2107 sec = arm_obj_section_from_vma (objfile, idx);
2108 if (sec == NULL)
2109 continue;
2110 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2112 /* Determine address of exception table entry. */
2113 if (val == 1)
2115 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2117 else if ((val & 0xff000000) == 0x80000000)
2119 /* Exception table entry embedded in .ARM.exidx
2120 -- must be short form. */
2121 word = val;
2122 n_bytes = 3;
2124 else if (!(val & 0x80000000))
2126 /* Exception table entry in .ARM.extab. */
2127 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2128 addr += exidx_vma + i * 8 + 4;
2130 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2132 word = bfd_h_get_32 (objfile->obfd,
2133 extab_data.data () + addr - extab_vma);
2134 addr += 4;
2136 if ((word & 0xff000000) == 0x80000000)
2138 /* Short form. */
2139 n_bytes = 3;
2141 else if ((word & 0xff000000) == 0x81000000
2142 || (word & 0xff000000) == 0x82000000)
2144 /* Long form. */
2145 n_bytes = 2;
2146 n_words = ((word >> 16) & 0xff);
2148 else if (!(word & 0x80000000))
2150 bfd_vma pers;
2151 struct obj_section *pers_sec;
2152 int gnu_personality = 0;
2154 /* Custom personality routine. */
2155 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2156 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2158 /* Check whether we've got one of the variants of the
2159 GNU personality routines. */
2160 pers_sec = arm_obj_section_from_vma (objfile, pers);
2161 if (pers_sec)
2163 static const char *personality[] =
2165 "__gcc_personality_v0",
2166 "__gxx_personality_v0",
2167 "__gcj_personality_v0",
2168 "__gnu_objc_personality_v0",
2169 NULL
2172 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2173 int k;
2175 for (k = 0; personality[k]; k++)
2176 if (lookup_minimal_symbol_by_pc_name
2177 (pc, personality[k], objfile))
2179 gnu_personality = 1;
2180 break;
2184 /* If so, the next word contains a word count in the high
2185 byte, followed by the same unwind instructions as the
2186 pre-defined forms. */
2187 if (gnu_personality
2188 && addr + 4 <= extab_vma + extab_data.size ())
2190 word = bfd_h_get_32 (objfile->obfd,
2191 (extab_data.data ()
2192 + addr - extab_vma));
2193 addr += 4;
2194 n_bytes = 3;
2195 n_words = ((word >> 24) & 0xff);
2201 /* Sanity check address. */
2202 if (n_words)
2203 if (addr < extab_vma
2204 || addr + 4 * n_words > extab_vma + extab_data.size ())
2205 n_words = n_bytes = 0;
2207 /* The unwind instructions reside in WORD (only the N_BYTES least
2208 significant bytes are valid), followed by N_WORDS words in the
2209 extab section starting at ADDR. */
2210 if (n_bytes || n_words)
2212 gdb_byte *p = entry
2213 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2214 n_bytes + n_words * 4 + 1);
2216 while (n_bytes--)
2217 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2219 while (n_words--)
2221 word = bfd_h_get_32 (objfile->obfd,
2222 extab_data.data () + addr - extab_vma);
2223 addr += 4;
2225 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2226 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2227 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2228 *p++ = (gdb_byte) (word & 0xff);
2231 /* Implied "Finish" to terminate the list. */
2232 *p++ = 0xb0;
2235 /* Push the entry onto the vector. Entries are guaranteed to
2236 appear in order of increasing addresses. */
2237 new_exidx_entry.addr = idx;
2238 new_exidx_entry.entry = entry;
2239 VEC_safe_push (arm_exidx_entry_s,
2240 data->section_maps[sec->the_bfd_section->index],
2241 &new_exidx_entry);
2245 /* Search for the exception table entry covering MEMADDR. If one is found,
2246 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2247 set *START to the start of the region covered by this entry. */
2249 static gdb_byte *
2250 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2252 struct obj_section *sec;
2254 sec = find_pc_section (memaddr);
2255 if (sec != NULL)
2257 struct arm_exidx_data *data;
2258 VEC(arm_exidx_entry_s) *map;
2259 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2260 unsigned int idx;
2262 data = ((struct arm_exidx_data *)
2263 objfile_data (sec->objfile, arm_exidx_data_key));
2264 if (data != NULL)
2266 map = data->section_maps[sec->the_bfd_section->index];
2267 if (!VEC_empty (arm_exidx_entry_s, map))
2269 struct arm_exidx_entry *map_sym;
2271 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2272 arm_compare_exidx_entries);
2274 /* VEC_lower_bound finds the earliest ordered insertion
2275 point. If the following symbol starts at this exact
2276 address, we use that; otherwise, the preceding
2277 exception table entry covers this address. */
2278 if (idx < VEC_length (arm_exidx_entry_s, map))
2280 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2281 if (map_sym->addr == map_key.addr)
2283 if (start)
2284 *start = map_sym->addr + obj_section_addr (sec);
2285 return map_sym->entry;
2289 if (idx > 0)
2291 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2292 if (start)
2293 *start = map_sym->addr + obj_section_addr (sec);
2294 return map_sym->entry;
2300 return NULL;
2303 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2304 instruction list from the ARM exception table entry ENTRY, allocate and
2305 return a prologue cache structure describing how to unwind this frame.
2307 Return NULL if the unwinding instruction list contains a "spare",
2308 "reserved" or "refuse to unwind" instruction as defined in section
2309 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2310 for the ARM Architecture" document. */
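/* Illustrative example (the exact bytes come from the toolchain): for
   a prologue of "push {r4, r5, lr}; sub sp, sp, #16", a typical
   normalized instruction list is 0x03 0xa9 0xb0 -- 0x03 adds 16 to
   vsp, 0xa9 pops r4, r5 and LR, and 0xb0 ("Finish") copies LR into PC
   in the decoder below.  */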
2312 static struct arm_prologue_cache *
2313 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2315 CORE_ADDR vsp = 0;
2316 int vsp_valid = 0;
2318 struct arm_prologue_cache *cache;
2319 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2320 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2322 for (;;)
2324 gdb_byte insn;
2326 /* Whenever we reload SP, we have to retrieve its actual
2327 value in the current frame. */
2328 if (!vsp_valid)
2330 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2332 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2333 vsp = get_frame_register_unsigned (this_frame, reg);
2335 else
2337 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2338 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2341 vsp_valid = 1;
2344 /* Decode next unwind instruction. */
2345 insn = *entry++;
2347 if ((insn & 0xc0) == 0)
2349 int offset = insn & 0x3f;
2350 vsp += (offset << 2) + 4;
2352 else if ((insn & 0xc0) == 0x40)
2354 int offset = insn & 0x3f;
2355 vsp -= (offset << 2) + 4;
2357 else if ((insn & 0xf0) == 0x80)
2359 int mask = ((insn & 0xf) << 8) | *entry++;
2360 int i;
2362 /* The special case of an all-zero mask identifies
2363 "Refuse to unwind". We return NULL to fall back
2364 to the prologue analyzer. */
2365 if (mask == 0)
2366 return NULL;
2368 /* Pop registers r4..r15 under mask. */
2369 for (i = 0; i < 12; i++)
2370 if (mask & (1 << i))
2372 cache->saved_regs[4 + i].addr = vsp;
2373 vsp += 4;
2376 /* Special-case popping SP -- we need to reload vsp. */
2377 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2378 vsp_valid = 0;
2380 else if ((insn & 0xf0) == 0x90)
2382 int reg = insn & 0xf;
2384 /* Reserved cases. */
2385 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2386 return NULL;
2388 /* Set SP from another register and mark VSP for reload. */
2389 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2390 vsp_valid = 0;
2392 else if ((insn & 0xf0) == 0xa0)
2394 int count = insn & 0x7;
2395 int pop_lr = (insn & 0x8) != 0;
2396 int i;
2398 /* Pop r4..r[4+count]. */
2399 for (i = 0; i <= count; i++)
2401 cache->saved_regs[4 + i].addr = vsp;
2402 vsp += 4;
2405 /* If indicated by flag, pop LR as well. */
2406 if (pop_lr)
2408 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2409 vsp += 4;
2412 else if (insn == 0xb0)
2414 /* We could only have updated PC by popping into it; if so, it
2415 will show up as an address. Otherwise, copy LR into PC. */
2416 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2417 cache->saved_regs[ARM_PC_REGNUM]
2418 = cache->saved_regs[ARM_LR_REGNUM];
2420 /* We're done. */
2421 break;
2423 else if (insn == 0xb1)
2425 int mask = *entry++;
2426 int i;
2428 /* All-zero mask and mask >= 16 is "spare". */
2429 if (mask == 0 || mask >= 16)
2430 return NULL;
2432 /* Pop r0..r3 under mask. */
2433 for (i = 0; i < 4; i++)
2434 if (mask & (1 << i))
2436 cache->saved_regs[i].addr = vsp;
2437 vsp += 4;
2440 else if (insn == 0xb2)
2442 ULONGEST offset = 0;
2443 unsigned shift = 0;
2447 offset |= (*entry & 0x7f) << shift;
2448 shift += 7;
2450 while (*entry++ & 0x80);
2452 vsp += 0x204 + (offset << 2);
2454 else if (insn == 0xb3)
2456 int start = *entry >> 4;
2457 int count = (*entry++) & 0xf;
2458 int i;
2460 /* Only registers D0..D15 are valid here. */
2461 if (start + count >= 16)
2462 return NULL;
2464 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2465 for (i = 0; i <= count; i++)
2467 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2468 vsp += 8;
2471 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2472 vsp += 4;
2474 else if ((insn & 0xf8) == 0xb8)
2476 int count = insn & 0x7;
2477 int i;
2479 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2480 for (i = 0; i <= count; i++)
2482 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2483 vsp += 8;
2486 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2487 vsp += 4;
2489 else if (insn == 0xc6)
2491 int start = *entry >> 4;
2492 int count = (*entry++) & 0xf;
2493 int i;
2495 /* Only registers WR0..WR15 are valid. */
2496 if (start + count >= 16)
2497 return NULL;
2499 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2500 for (i = 0; i <= count; i++)
2502 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2503 vsp += 8;
2506 else if (insn == 0xc7)
2508 int mask = *entry++;
2509 int i;
2511 /* All-zero mask and mask >= 16 is "spare". */
2512 if (mask == 0 || mask >= 16)
2513 return NULL;
2515 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2516 for (i = 0; i < 4; i++)
2517 if (mask & (1 << i))
2519 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2520 vsp += 4;
2523 else if ((insn & 0xf8) == 0xc0)
2525 int count = insn & 0x7;
2526 int i;
2528 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2529 for (i = 0; i <= count; i++)
2531 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2532 vsp += 8;
2535 else if (insn == 0xc8)
2537 int start = *entry >> 4;
2538 int count = (*entry++) & 0xf;
2539 int i;
2541 /* Only registers D0..D31 are valid. */
2542 if (start + count >= 16)
2543 return NULL;
2545 /* Pop VFP double-precision registers
2546 D[16+start]..D[16+start+count]. */
2547 for (i = 0; i <= count; i++)
2549 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2550 vsp += 8;
2553 else if (insn == 0xc9)
2555 int start = *entry >> 4;
2556 int count = (*entry++) & 0xf;
2557 int i;
2559 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2560 for (i = 0; i <= count; i++)
2562 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2563 vsp += 8;
2566 else if ((insn & 0xf8) == 0xd0)
2568 int count = insn & 0x7;
2569 int i;
2571 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2572 for (i = 0; i <= count; i++)
2574 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2575 vsp += 8;
2578 else
2580 /* Everything else is "spare". */
2581 return NULL;
2585 /* If we restore SP from a register, assume this was the frame register.
2586 Otherwise just fall back to SP as frame register. */
2587 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2588 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2589 else
2590 cache->framereg = ARM_SP_REGNUM;
2592 /* Determine offset to previous frame. */
2593 cache->framesize
2594 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2596 /* We already got the previous SP. */
2597 cache->prev_sp = vsp;
2599 return cache;
2602 /* Unwinding via ARM exception table entries. Note that the sniffer
2603 already computes a filled-in prologue cache, which is then used
2604 with the same arm_prologue_this_id and arm_prologue_prev_register
2605 routines also used for prologue-parsing based unwinding. */
2607 static int
2608 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2609 struct frame_info *this_frame,
2610 void **this_prologue_cache)
2612 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2613 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2614 CORE_ADDR addr_in_block, exidx_region, func_start;
2615 struct arm_prologue_cache *cache;
2616 gdb_byte *entry;
2618 /* See if we have an ARM exception table entry covering this address. */
2619 addr_in_block = get_frame_address_in_block (this_frame);
2620 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2621 if (!entry)
2622 return 0;
2624 /* The ARM exception table does not describe unwind information
2625 for arbitrary PC values, but is guaranteed to be correct only
2626 at call sites. We have to decide here whether we want to use
2627 ARM exception table information for this frame, or fall back
2628 to using prologue parsing. (Note that if we have DWARF CFI,
2629 this sniffer isn't even called -- CFI is always preferred.)
2631 Before we make this decision, however, we check whether we
2632 actually have *symbol* information for the current frame.
2633 If not, prologue parsing would not work anyway, so we might
2634 as well use the exception table and hope for the best. */
2635 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2637 int exc_valid = 0;
2639 /* If the next frame is "normal", we are at a call site in this
2640 frame, so exception information is guaranteed to be valid. */
2641 if (get_next_frame (this_frame)
2642 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2643 exc_valid = 1;
2645 /* We also assume exception information is valid if we're currently
2646 blocked in a system call. The system library is supposed to
2647 ensure this, so that e.g. pthread cancellation works. */
2648 if (arm_frame_is_thumb (this_frame))
2650 ULONGEST insn;
2652 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2653 2, byte_order_for_code, &insn)
2654 && (insn & 0xff00) == 0xdf00 /* svc */)
2655 exc_valid = 1;
2657 else
2659 ULONGEST insn;
2661 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2662 4, byte_order_for_code, &insn)
2663 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2664 exc_valid = 1;
2667 /* Bail out if we don't know that exception information is valid. */
2668 if (!exc_valid)
2669 return 0;
2671 /* The ARM exception index does not mark the *end* of the region
2672 covered by the entry, and some functions will not have any entry.
2673 To correctly recognize the end of the covered region, the linker
2674 should have inserted dummy records with a CANTUNWIND marker.
2676 Unfortunately, current versions of GNU ld do not reliably do
2677 this, and thus we may have found an incorrect entry above.
2678 As a (temporary) sanity check, we only use the entry if it
2679 lies *within* the bounds of the function. Note that this check
2680 might reject perfectly valid entries that just happen to cover
2681 multiple functions; therefore this check ought to be removed
2682 once the linker is fixed. */
2683 if (func_start > exidx_region)
2684 return 0;
2687 /* Decode the list of unwinding instructions into a prologue cache.
2688 Note that this may fail due to e.g. a "refuse to unwind" code. */
2689 cache = arm_exidx_fill_cache (this_frame, entry);
2690 if (!cache)
2691 return 0;
2693 *this_prologue_cache = cache;
2694 return 1;
2697 struct frame_unwind arm_exidx_unwind = {
2698 NORMAL_FRAME,
2699 default_frame_unwind_stop_reason,
2700 arm_prologue_this_id,
2701 arm_prologue_prev_register,
2702 NULL,
2703 arm_exidx_unwind_sniffer
2706 static struct arm_prologue_cache *
2707 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2709 struct arm_prologue_cache *cache;
2710 int reg;
2712 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2713 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2715 /* Still rely on the offset calculated from prologue. */
2716 arm_scan_prologue (this_frame, cache);
2718 /* Since we are in epilogue, the SP has been restored. */
2719 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2721 /* Calculate actual addresses of saved registers using offsets
2722 determined by arm_scan_prologue. */
2723 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2724 if (trad_frame_addr_p (cache->saved_regs, reg))
2725 cache->saved_regs[reg].addr += cache->prev_sp;
2727 return cache;
2730 /* Implementation of function hook 'this_id' in
2731 'struct frame_unwind' for epilogue unwinder. */
2733 static void
2734 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2735 void **this_cache,
2736 struct frame_id *this_id)
2738 struct arm_prologue_cache *cache;
2739 CORE_ADDR pc, func;
2741 if (*this_cache == NULL)
2742 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2743 cache = (struct arm_prologue_cache *) *this_cache;
2745 /* Use function start address as part of the frame ID. If we cannot
2746 identify the start address (due to missing symbol information),
2747 fall back to just using the current PC. */
2748 pc = get_frame_pc (this_frame);
2749 func = get_frame_func (this_frame);
2750 if (func == 0)
2751 func = pc;
2753 (*this_id) = frame_id_build (cache->prev_sp, pc);
2756 /* Implementation of function hook 'prev_register' in
2757 'struct frame_unwind' for epilogue unwinder. */
2759 static struct value *
2760 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2761 void **this_cache, int regnum)
2763 if (*this_cache == NULL)
2764 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2766 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2769 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2770 CORE_ADDR pc);
2771 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2772 CORE_ADDR pc);
2774 /* Implementation of function hook 'sniffer' in
2775 'struct frame_unwind' for epilogue unwinder. */
2777 static int
2778 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2779 struct frame_info *this_frame,
2780 void **this_prologue_cache)
2782 if (frame_relative_level (this_frame) == 0)
2784 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2785 CORE_ADDR pc = get_frame_pc (this_frame);
2787 if (arm_frame_is_thumb (this_frame))
2788 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2789 else
2790 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2792 else
2793 return 0;
2796 /* Frame unwinder from epilogue. */
2798 static const struct frame_unwind arm_epilogue_frame_unwind =
2800 NORMAL_FRAME,
2801 default_frame_unwind_stop_reason,
2802 arm_epilogue_frame_this_id,
2803 arm_epilogue_frame_prev_register,
2804 NULL,
2805 arm_epilogue_frame_sniffer,
2808 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2809 trampoline, return the target PC. Otherwise return 0.
2811 void call0a (char c, short s, int i, long l) {}
2813 int main (void)
2815 (*pointer_to_call0a) (c, s, i, l);
2818 Instead of calling a stub library function _call_via_xx (xx is
2819 the register name), GCC may inline the trampoline in the object
2820 file as below (register r2 has the address of call0a).
2822 .global main
2823 .type main, %function
2825 bl .L1
2827 .size main, .-main
2829 .L1:
2830 bx r2
2832 The trampoline 'bx r2' doesn't belong to main. */
2834 static CORE_ADDR
2835 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2837 /* The heuristic for recognizing such a trampoline is that FRAME is
2838 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2839 if (arm_frame_is_thumb (frame))
2841 gdb_byte buf[2];
2843 if (target_read_memory (pc, buf, 2) == 0)
2845 struct gdbarch *gdbarch = get_frame_arch (frame);
2846 enum bfd_endian byte_order_for_code
2847 = gdbarch_byte_order_for_code (gdbarch);
2848 uint16_t insn
2849 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2851 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2853 CORE_ADDR dest
2854 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2856 /* Clear the LSB so that the gdb core sets the step-resume
2857 breakpoint at the right address. */
2858 return UNMAKE_THUMB_ADDR (dest);
2863 return 0;
2866 static struct arm_prologue_cache *
2867 arm_make_stub_cache (struct frame_info *this_frame)
2869 struct arm_prologue_cache *cache;
2871 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2872 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2874 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2876 return cache;
2879 /* Our frame ID for a stub frame is the current SP and LR. */
2881 static void
2882 arm_stub_this_id (struct frame_info *this_frame,
2883 void **this_cache,
2884 struct frame_id *this_id)
2886 struct arm_prologue_cache *cache;
2888 if (*this_cache == NULL)
2889 *this_cache = arm_make_stub_cache (this_frame);
2890 cache = (struct arm_prologue_cache *) *this_cache;
2892 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2895 static int
2896 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2897 struct frame_info *this_frame,
2898 void **this_prologue_cache)
2900 CORE_ADDR addr_in_block;
2901 gdb_byte dummy[4];
2902 CORE_ADDR pc, start_addr;
2903 const char *name;
2905 addr_in_block = get_frame_address_in_block (this_frame);
2906 pc = get_frame_pc (this_frame);
2907 if (in_plt_section (addr_in_block)
2908 /* We also use the stub unwinder if the target memory is unreadable,
2909 to avoid having the prologue unwinder try to read it. */
2910 || target_read_memory (pc, dummy, 4) != 0)
2911 return 1;
2913 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2914 && arm_skip_bx_reg (this_frame, pc) != 0)
2915 return 1;
2917 return 0;
2920 struct frame_unwind arm_stub_unwind = {
2921 NORMAL_FRAME,
2922 default_frame_unwind_stop_reason,
2923 arm_stub_this_id,
2924 arm_prologue_prev_register,
2925 NULL,
2926 arm_stub_unwind_sniffer
2929 /* Store, into CACHE->saved_regs, the addresses of the saved
2930 registers of the frame described by THIS_FRAME, and return
2931 CACHE. */
2933 static struct arm_prologue_cache *
2934 arm_m_exception_cache (struct frame_info *this_frame)
2936 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2937 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2938 struct arm_prologue_cache *cache;
2939 CORE_ADDR unwound_sp;
2940 LONGEST xpsr;
2942 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2943 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2945 unwound_sp = get_frame_register_unsigned (this_frame,
2946 ARM_SP_REGNUM);
2948 /* The hardware saves eight 32-bit words, comprising xPSR,
2949 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2950 "B1.5.6 Exception entry behavior" in
2951 "ARMv7-M Architecture Reference Manual". */
2952 cache->saved_regs[0].addr = unwound_sp;
2953 cache->saved_regs[1].addr = unwound_sp + 4;
2954 cache->saved_regs[2].addr = unwound_sp + 8;
2955 cache->saved_regs[3].addr = unwound_sp + 12;
2956 cache->saved_regs[12].addr = unwound_sp + 16;
2957 cache->saved_regs[14].addr = unwound_sp + 20;
2958 cache->saved_regs[15].addr = unwound_sp + 24;
2959 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2961 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2962 aligner between the top of the 32-byte stack frame and the
2963 previous context's stack pointer. */
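/* For example, if UNWOUND_SP is 0x20001000 and bit 9 of the saved
   xPSR is set, the caller's SP at exception entry was
   0x20001000 + 32 + 4 = 0x20001024.  */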
2964 cache->prev_sp = unwound_sp + 32;
2965 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2966 && (xpsr & (1 << 9)) != 0)
2967 cache->prev_sp += 4;
2969 return cache;
2972 /* Implementation of function hook 'this_id' in
2973 'struct frame_unwind'. */
2975 static void
2976 arm_m_exception_this_id (struct frame_info *this_frame,
2977 void **this_cache,
2978 struct frame_id *this_id)
2980 struct arm_prologue_cache *cache;
2982 if (*this_cache == NULL)
2983 *this_cache = arm_m_exception_cache (this_frame);
2984 cache = (struct arm_prologue_cache *) *this_cache;
2986 /* Our frame ID for an M-profile exception frame is the current SP and PC. */
2987 *this_id = frame_id_build (cache->prev_sp,
2988 get_frame_pc (this_frame));
2991 /* Implementation of function hook 'prev_register' in
2992 'struct frame_unwind'. */
2994 static struct value *
2995 arm_m_exception_prev_register (struct frame_info *this_frame,
2996 void **this_cache,
2997 int prev_regnum)
2999 struct arm_prologue_cache *cache;
3001 if (*this_cache == NULL)
3002 *this_cache = arm_m_exception_cache (this_frame);
3003 cache = (struct arm_prologue_cache *) *this_cache;
3005 /* The value was already reconstructed into PREV_SP. */
3006 if (prev_regnum == ARM_SP_REGNUM)
3007 return frame_unwind_got_constant (this_frame, prev_regnum,
3008 cache->prev_sp);
3010 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3011 prev_regnum);
3014 /* Implementation of function hook 'sniffer' in
3015 'struct frame_unwind'. */
3017 static int
3018 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3019 struct frame_info *this_frame,
3020 void **this_prologue_cache)
3022 CORE_ADDR this_pc = get_frame_pc (this_frame);
3024 /* No need to check is_m; this sniffer is only registered for
3025 M-profile architectures. */
3027 /* Check if exception frame returns to a magic PC value. */
3028 return arm_m_addr_is_magic (this_pc);
3031 /* Frame unwinder for M-profile exceptions. */
3033 struct frame_unwind arm_m_exception_unwind =
3035 SIGTRAMP_FRAME,
3036 default_frame_unwind_stop_reason,
3037 arm_m_exception_this_id,
3038 arm_m_exception_prev_register,
3039 NULL,
3040 arm_m_exception_unwind_sniffer
3043 static CORE_ADDR
3044 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3046 struct arm_prologue_cache *cache;
3048 if (*this_cache == NULL)
3049 *this_cache = arm_make_prologue_cache (this_frame);
3050 cache = (struct arm_prologue_cache *) *this_cache;
3052 return cache->prev_sp - cache->framesize;
3055 struct frame_base arm_normal_base = {
3056 &arm_prologue_unwind,
3057 arm_normal_frame_base,
3058 arm_normal_frame_base,
3059 arm_normal_frame_base
3062 static struct value *
3063 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3064 int regnum)
3066 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3067 CORE_ADDR lr, cpsr;
3068 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3070 switch (regnum)
3072 case ARM_PC_REGNUM:
3073 /* The PC is normally copied from the return column, which
3074 describes saves of LR. However, that version may have an
3075 extra bit set to indicate Thumb state. The bit is not
3076 part of the PC. */
3077 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3078 return frame_unwind_got_constant (this_frame, regnum,
3079 arm_addr_bits_remove (gdbarch, lr));
3081 case ARM_PS_REGNUM:
3082 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3083 cpsr = get_frame_register_unsigned (this_frame, regnum);
3084 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3085 if (IS_THUMB_ADDR (lr))
3086 cpsr |= t_bit;
3087 else
3088 cpsr &= ~t_bit;
3089 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3091 default:
3092 internal_error (__FILE__, __LINE__,
3093 _("Unexpected register %d"), regnum);
3097 static void
3098 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3099 struct dwarf2_frame_state_reg *reg,
3100 struct frame_info *this_frame)
3102 switch (regnum)
3104 case ARM_PC_REGNUM:
3105 case ARM_PS_REGNUM:
3106 reg->how = DWARF2_FRAME_REG_FN;
3107 reg->loc.fn = arm_dwarf2_prev_register;
3108 break;
3109 case ARM_SP_REGNUM:
3110 reg->how = DWARF2_FRAME_REG_CFA;
3111 break;
3115 /* Implement the stack_frame_destroyed_p gdbarch method. */
3117 static int
3118 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3120 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3121 unsigned int insn, insn2;
3122 int found_return = 0, found_stack_adjust = 0;
3123 CORE_ADDR func_start, func_end;
3124 CORE_ADDR scan_pc;
3125 gdb_byte buf[4];
3127 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3128 return 0;
3130 /* The epilogue is a sequence of instructions along the following lines:
3132 - add stack frame size to SP or FP
3133 - [if frame pointer used] restore SP from FP
3134 - restore registers from SP [may include PC]
3135 - a return-type instruction [if PC wasn't already restored]
3137 In a first pass, we scan forward from the current PC and verify the
3138 instructions we find as compatible with this sequence, ending in a
3139 return instruction.
3141 However, this is not sufficient to distinguish indirect function calls
3142 within a function from indirect tail calls in the epilogue in some cases.
3143 Therefore, if we didn't already find any SP-changing instruction during
3144 forward scan, we add a backward scanning heuristic to ensure we actually
3145 are in the epilogue. */
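/* For example, a Thumb epilogue such as "add sp, #16" followed by
   "pop {r4, r7, pc}" is accepted: the add is an SP-restoring
   instruction, and the pop (matched by the 0xbd00 pattern below) both
   restores registers and returns.  */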
3147 scan_pc = pc;
3148 while (scan_pc < func_end && !found_return)
3150 if (target_read_memory (scan_pc, buf, 2))
3151 break;
3153 scan_pc += 2;
3154 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3156 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3157 found_return = 1;
3158 else if (insn == 0x46f7) /* mov pc, lr */
3159 found_return = 1;
3160 else if (thumb_instruction_restores_sp (insn))
3162 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3163 found_return = 1;
3165 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3167 if (target_read_memory (scan_pc, buf, 2))
3168 break;
3170 scan_pc += 2;
3171 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3173 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3175 if (insn2 & 0x8000) /* <registers> include PC. */
3176 found_return = 1;
3178 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3179 && (insn2 & 0x0fff) == 0x0b04)
3181 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3182 found_return = 1;
3184 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3185 && (insn2 & 0x0e00) == 0x0a00)
3187 else
3188 break;
3190 else
3191 break;
3194 if (!found_return)
3195 return 0;
3197 /* Since any instruction in the epilogue sequence, with the possible
3198 exception of return itself, updates the stack pointer, we need to
3199 scan backwards for at most one instruction. Try either a 16-bit or
3200 a 32-bit instruction. This is just a heuristic, so we do not worry
3201 too much about false positives. */
3203 if (pc - 4 < func_start)
3204 return 0;
3205 if (target_read_memory (pc - 4, buf, 4))
3206 return 0;
3208 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3209 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3211 if (thumb_instruction_restores_sp (insn2))
3212 found_stack_adjust = 1;
3213 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3214 found_stack_adjust = 1;
3215 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3216 && (insn2 & 0x0fff) == 0x0b04)
3217 found_stack_adjust = 1;
3218 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3219 && (insn2 & 0x0e00) == 0x0a00)
3220 found_stack_adjust = 1;
3222 return found_stack_adjust;
3225 static int
3226 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3228 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3229 unsigned int insn;
3230 int found_return;
3231 CORE_ADDR func_start, func_end;
3233 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3234 return 0;
3236 /* We are in the epilogue if the previous instruction was a stack
3237 adjustment and the next instruction is a possible return (bx, mov
3238 pc, or pop). We could have to scan backwards to find the stack
3239 adjustment, or forwards to find the return, but this is a decent
3240 approximation. First scan forwards. */
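/* For example, "ldmfd sp!, {r4, r5, pc}" (0xe8bd8030) matches the POP
   pattern below, and a preceding "add sp, sp, #16" would satisfy the
   backward scan.  */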
3242 found_return = 0;
3243 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3244 if (bits (insn, 28, 31) != INST_NV)
3246 if ((insn & 0x0ffffff0) == 0x012fff10)
3247 /* BX. */
3248 found_return = 1;
3249 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3250 /* MOV PC. */
3251 found_return = 1;
3252 else if ((insn & 0x0fff0000) == 0x08bd0000
3253 && (insn & 0x0000c000) != 0)
3254 /* POP (LDMIA), including PC or LR. */
3255 found_return = 1;
3258 if (!found_return)
3259 return 0;
3261 /* Scan backwards. This is just a heuristic, so do not worry about
3262 false positives from mode changes. */
3264 if (pc < func_start + 4)
3265 return 0;
3267 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3268 if (arm_instruction_restores_sp (insn))
3269 return 1;
3271 return 0;
3274 /* Implement the stack_frame_destroyed_p gdbarch method. */
3276 static int
3277 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3279 if (arm_pc_is_thumb (gdbarch, pc))
3280 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3281 else
3282 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3285 /* When arguments must be pushed onto the stack, they go on in reverse
3286 order. The code below implements a FILO (stack) to do this. */
3288 struct stack_item
3290 int len;
3291 struct stack_item *prev;
3292 gdb_byte *data;
3295 static struct stack_item *
3296 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3298 struct stack_item *si;
3299 si = XNEW (struct stack_item);
3300 si->data = (gdb_byte *) xmalloc (len);
3301 si->len = len;
3302 si->prev = prev;
3303 memcpy (si->data, contents, len);
3304 return si;
3307 static struct stack_item *
3308 pop_stack_item (struct stack_item *si)
3310 struct stack_item *dead = si;
3311 si = si->prev;
3312 xfree (dead->data);
3313 xfree (dead);
3314 return si;
3317 /* Implement the gdbarch type alignment method, overrides the generic
3318 alignment algorithm for anything that is arm specific. */
3320 static ULONGEST
3321 arm_type_align (gdbarch *gdbarch, struct type *t)
3323 t = check_typedef (t);
3324 if (TYPE_CODE (t) == TYPE_CODE_ARRAY && TYPE_VECTOR (t))
3326 /* Use the natural alignment for vector types (the same as for the
3327 scalar element type), but cap the alignment at 64 bits. */
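/* For example, a 16-byte vector is given 8-byte alignment here,
   while an 8-byte or smaller vector keeps its natural alignment.  */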
3328 if (TYPE_LENGTH (t) > 8)
3329 return 8;
3330 else
3331 return TYPE_LENGTH (t);
3334 /* Allow the common code to calculate the alignment. */
3335 return 0;
3338 /* Possible base types for a candidate for passing and returning in
3339 VFP registers. */
3341 enum arm_vfp_cprc_base_type
3343 VFP_CPRC_UNKNOWN,
3344 VFP_CPRC_SINGLE,
3345 VFP_CPRC_DOUBLE,
3346 VFP_CPRC_VEC64,
3347 VFP_CPRC_VEC128
3350 /* The length of one element of base type B. */
3352 static unsigned
3353 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3355 switch (b)
3357 case VFP_CPRC_SINGLE:
3358 return 4;
3359 case VFP_CPRC_DOUBLE:
3360 return 8;
3361 case VFP_CPRC_VEC64:
3362 return 8;
3363 case VFP_CPRC_VEC128:
3364 return 16;
3365 default:
3366 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3367 (int) b);
3371 /* The character ('s', 'd' or 'q') for the type of VFP register used
3372 for passing base type B. */
3374 static int
3375 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3377 switch (b)
3379 case VFP_CPRC_SINGLE:
3380 return 's';
3381 case VFP_CPRC_DOUBLE:
3382 return 'd';
3383 case VFP_CPRC_VEC64:
3384 return 'd';
3385 case VFP_CPRC_VEC128:
3386 return 'q';
3387 default:
3388 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3389 (int) b);
3393 /* Determine whether T may be part of a candidate for passing and
3394 returning in VFP registers, ignoring the limit on the total number
3395 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3396 classification of the first valid component found; if it is not
3397 VFP_CPRC_UNKNOWN, all components must have the same classification
3398 as *BASE_TYPE. If it is found that T contains a type not permitted
3399 for passing and returning in VFP registers, a type differently
3400 classified from *BASE_TYPE, or two types differently classified
3401 from each other, return -1, otherwise return the total number of
3402 base-type elements found (possibly 0 in an empty structure or
3403 array). Vector types are not currently supported, matching the
3404 generic AAPCS support. */
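/* For example, "struct { double re; double im; }" yields two
   VFP_CPRC_DOUBLE elements, while "struct { float f; double d; }"
   mixes classifications and therefore yields -1.  */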
3406 static int
3407 arm_vfp_cprc_sub_candidate (struct type *t,
3408 enum arm_vfp_cprc_base_type *base_type)
3410 t = check_typedef (t);
3411 switch (TYPE_CODE (t))
3413 case TYPE_CODE_FLT:
3414 switch (TYPE_LENGTH (t))
3416 case 4:
3417 if (*base_type == VFP_CPRC_UNKNOWN)
3418 *base_type = VFP_CPRC_SINGLE;
3419 else if (*base_type != VFP_CPRC_SINGLE)
3420 return -1;
3421 return 1;
3423 case 8:
3424 if (*base_type == VFP_CPRC_UNKNOWN)
3425 *base_type = VFP_CPRC_DOUBLE;
3426 else if (*base_type != VFP_CPRC_DOUBLE)
3427 return -1;
3428 return 1;
3430 default:
3431 return -1;
3433 break;
3435 case TYPE_CODE_COMPLEX:
3436 /* Arguments of complex T where T is one of the types float or
3437 double get treated as if they are implemented as:
3439 struct complexT
3441 T real;
3442 T imag;
3446 switch (TYPE_LENGTH (t))
3448 case 8:
3449 if (*base_type == VFP_CPRC_UNKNOWN)
3450 *base_type = VFP_CPRC_SINGLE;
3451 else if (*base_type != VFP_CPRC_SINGLE)
3452 return -1;
3453 return 2;
3455 case 16:
3456 if (*base_type == VFP_CPRC_UNKNOWN)
3457 *base_type = VFP_CPRC_DOUBLE;
3458 else if (*base_type != VFP_CPRC_DOUBLE)
3459 return -1;
3460 return 2;
3462 default:
3463 return -1;
3465 break;
3467 case TYPE_CODE_ARRAY:
3469 if (TYPE_VECTOR (t))
3471 /* 64-bit and 128-bit containerized vector types are VFP
3472 CPRCs. */
3473 switch (TYPE_LENGTH (t))
3475 case 8:
3476 if (*base_type == VFP_CPRC_UNKNOWN)
3477 *base_type = VFP_CPRC_VEC64;
3478 return 1;
3479 case 16:
3480 if (*base_type == VFP_CPRC_UNKNOWN)
3481 *base_type = VFP_CPRC_VEC128;
3482 return 1;
3483 default:
3484 return -1;
3487 else
3489 int count;
3490 unsigned unitlen;
3492 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3493 base_type);
3494 if (count == -1)
3495 return -1;
3496 if (TYPE_LENGTH (t) == 0)
3498 gdb_assert (count == 0);
3499 return 0;
3501 else if (count == 0)
3502 return -1;
3503 unitlen = arm_vfp_cprc_unit_length (*base_type);
3504 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3505 return TYPE_LENGTH (t) / unitlen;
3508 break;
3510 case TYPE_CODE_STRUCT:
3512 int count = 0;
3513 unsigned unitlen;
3514 int i;
3515 for (i = 0; i < TYPE_NFIELDS (t); i++)
3517 int sub_count = 0;
3519 if (!field_is_static (&TYPE_FIELD (t, i)))
3520 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3521 base_type);
3522 if (sub_count == -1)
3523 return -1;
3524 count += sub_count;
3526 if (TYPE_LENGTH (t) == 0)
3528 gdb_assert (count == 0);
3529 return 0;
3531 else if (count == 0)
3532 return -1;
3533 unitlen = arm_vfp_cprc_unit_length (*base_type);
3534 if (TYPE_LENGTH (t) != unitlen * count)
3535 return -1;
3536 return count;
3539 case TYPE_CODE_UNION:
3541 int count = 0;
3542 unsigned unitlen;
3543 int i;
3544 for (i = 0; i < TYPE_NFIELDS (t); i++)
3546 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3547 base_type);
3548 if (sub_count == -1)
3549 return -1;
3550 count = (count > sub_count ? count : sub_count);
3552 if (TYPE_LENGTH (t) == 0)
3554 gdb_assert (count == 0);
3555 return 0;
3557 else if (count == 0)
3558 return -1;
3559 unitlen = arm_vfp_cprc_unit_length (*base_type);
3560 if (TYPE_LENGTH (t) != unitlen * count)
3561 return -1;
3562 return count;
3565 default:
3566 break;
3569 return -1;
3572 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3573 if passed to or returned from a non-variadic function with the VFP
3574 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3575 *BASE_TYPE to the base type for T and *COUNT to the number of
3576 elements of that base type before returning. */
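/* For example, a struct of four floats is a CPRC with *BASE_TYPE
   VFP_CPRC_SINGLE and *COUNT 4; a struct of five floats exceeds the
   four-element limit and is not a CPRC.  */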
3578 static int
3579 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3580 int *count)
3582 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3583 int c = arm_vfp_cprc_sub_candidate (t, &b);
3584 if (c <= 0 || c > 4)
3585 return 0;
3586 *base_type = b;
3587 *count = c;
3588 return 1;
3591 /* Return 1 if the VFP ABI should be used for passing arguments to and
3592 returning values from a function of type FUNC_TYPE, 0
3593 otherwise. */
3595 static int
3596 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3598 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3599 /* Variadic functions always use the base ABI. Assume that functions
3600 without debug info are not variadic. */
3601 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3602 return 0;
3603 /* The VFP ABI is only supported as a variant of AAPCS. */
3604 if (tdep->arm_abi != ARM_ABI_AAPCS)
3605 return 0;
3606 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3609 /* We currently support passing parameters in integer registers, which
3610 conforms with GCC's default model, and in VFP registers following
3611 the VFP variant of AAPCS. Several other calling-convention variants
3612 exist, and we should probably support some of them based on the selected ABI. */
3614 static CORE_ADDR
3615 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3616 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3617 struct value **args, CORE_ADDR sp,
3618 function_call_return_method return_method,
3619 CORE_ADDR struct_addr)
3621 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3622 int argnum;
3623 int argreg;
3624 int nstack;
3625 struct stack_item *si = NULL;
3626 int use_vfp_abi;
3627 struct type *ftype;
3628 unsigned vfp_regs_free = (1 << 16) - 1;
3630 /* Determine the type of this function and whether the VFP ABI
3631 applies. */
3632 ftype = check_typedef (value_type (function));
3633 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3634 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3635 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3637 /* Set the return address. For the ARM, the return breakpoint is
3638 always at BP_ADDR. */
3639 if (arm_pc_is_thumb (gdbarch, bp_addr))
3640 bp_addr |= 1;
3641 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3643 /* Walk through the list of args and determine how large a temporary
3644 stack is required. Need to take care here as structs may be
3645 passed on the stack, and we have to push them. */
3646 nstack = 0;
3648 argreg = ARM_A1_REGNUM;
3649 nstack = 0;
3651 /* The struct_return pointer occupies the first parameter
3652 passing register. */
3653 if (return_method == return_method_struct)
3655 if (arm_debug)
3656 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3657 gdbarch_register_name (gdbarch, argreg),
3658 paddress (gdbarch, struct_addr));
3659 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3660 argreg++;
3663 for (argnum = 0; argnum < nargs; argnum++)
3665 int len;
3666 struct type *arg_type;
3667 struct type *target_type;
3668 enum type_code typecode;
3669 const bfd_byte *val;
3670 int align;
3671 enum arm_vfp_cprc_base_type vfp_base_type;
3672 int vfp_base_count;
3673 int may_use_core_reg = 1;
3675 arg_type = check_typedef (value_type (args[argnum]));
3676 len = TYPE_LENGTH (arg_type);
3677 target_type = TYPE_TARGET_TYPE (arg_type);
3678 typecode = TYPE_CODE (arg_type);
3679 val = value_contents (args[argnum]);
3681 align = type_align (arg_type);
3682 /* Round alignment up to a whole number of words. */
3683 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3684 /* Different ABIs have different maximum alignments. */
3685 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3687 /* The APCS ABI only requires word alignment. */
3688 align = INT_REGISTER_SIZE;
3690 else
3692 /* The AAPCS requires at most doubleword alignment. */
3693 if (align > INT_REGISTER_SIZE * 2)
3694 align = INT_REGISTER_SIZE * 2;
3697 if (use_vfp_abi
3698 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3699 &vfp_base_count))
3701 int regno;
3702 int unit_length;
3703 int shift;
3704 unsigned mask;
3706 /* Because this is a CPRC it cannot go in a core register or
3707 cause a core register to be skipped for alignment.
3708 Either it goes in VFP registers and the rest of this loop
3709 iteration is skipped for this argument, or it goes on the
3710 stack (and the stack alignment code is correct for this
3711 case). */
3712 may_use_core_reg = 0;
3714 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3715 shift = unit_length / 4;
3716 mask = (1 << (shift * vfp_base_count)) - 1;
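/* For example, a CPRC of two doubles has unit_length 8, so shift is 2
   and mask is 0xf: the loop below looks for four consecutive free
   4-byte slots starting at an even slot index, i.e. a free register
   pair d<N>, d<N+1>.  */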
3717 for (regno = 0; regno < 16; regno += shift)
3718 if (((vfp_regs_free >> regno) & mask) == mask)
3719 break;
3721 if (regno < 16)
3723 int reg_char;
3724 int reg_scaled;
3725 int i;
3727 vfp_regs_free &= ~(mask << regno);
3728 reg_scaled = regno / shift;
3729 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3730 for (i = 0; i < vfp_base_count; i++)
3732 char name_buf[4];
3733 int regnum;
3734 if (reg_char == 'q')
3735 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3736 val + i * unit_length);
3737 else
3739 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3740 reg_char, reg_scaled + i);
3741 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3742 strlen (name_buf));
3743 regcache->cooked_write (regnum, val + i * unit_length);
3746 continue;
3748 else
3750 /* This CPRC could not go in VFP registers, so all VFP
3751 registers are now marked as used. */
3752 vfp_regs_free = 0;
3756 /* Push stack padding for doubleword alignment. */
3757 if (nstack & (align - 1))
3759 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3760 nstack += INT_REGISTER_SIZE;
3763 /* Doubleword aligned quantities must go in even register pairs. */
3764 if (may_use_core_reg
3765 && argreg <= ARM_LAST_ARG_REGNUM
3766 && align > INT_REGISTER_SIZE
3767 && argreg & 1)
3768 argreg++;
3770 /* If the argument is a pointer to a function, and it is a
3771 Thumb function, create a LOCAL copy of the value and set
3772 the THUMB bit in it. */
3773 if (TYPE_CODE_PTR == typecode
3774 && target_type != NULL
3775 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3777 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3778 if (arm_pc_is_thumb (gdbarch, regval))
3780 bfd_byte *copy = (bfd_byte *) alloca (len);
3781 store_unsigned_integer (copy, len, byte_order,
3782 MAKE_THUMB_ADDR (regval));
3783 val = copy;
3787 /* Copy the argument to general registers or the stack in
3788 register-sized pieces. Large arguments are split between
3789 registers and stack. */
3790 while (len > 0)
3792 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3793 CORE_ADDR regval
3794 = extract_unsigned_integer (val, partial_len, byte_order);
3796 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3798 /* The argument is being passed in a general purpose
3799 register. */
3800 if (byte_order == BFD_ENDIAN_BIG)
3801 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3802 if (arm_debug)
3803 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3804 argnum,
3805 gdbarch_register_name
3806 (gdbarch, argreg),
3807 phex (regval, INT_REGISTER_SIZE));
3808 regcache_cooked_write_unsigned (regcache, argreg, regval);
3809 argreg++;
3811 else
3813 gdb_byte buf[INT_REGISTER_SIZE];
3815 memset (buf, 0, sizeof (buf));
3816 store_unsigned_integer (buf, partial_len, byte_order, regval);
3818 /* Push the arguments onto the stack. */
3819 if (arm_debug)
3820 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3821 argnum, nstack);
3822 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3823 nstack += INT_REGISTER_SIZE;
3826 len -= partial_len;
3827 val += partial_len;
3830 /* If we have an odd number of words to push, then decrement the stack
3831 by one word now, so the first stack argument will be dword aligned. */
3832 if (nstack & 4)
3833 sp -= 4;
3835 while (si)
3837 sp -= si->len;
3838 write_memory (sp, si->data, si->len);
3839 si = pop_stack_item (si);
3842 /* Finally, update the SP register. */
3843 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3845 return sp;
3849 /* Always align the frame to an 8-byte boundary. This is required on
3850 some platforms and harmless on the rest. */
3852 static CORE_ADDR
3853 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3855 /* Align the stack to eight bytes. */
3856 return sp & ~ (CORE_ADDR) 7;
3859 static void
3860 print_fpu_flags (struct ui_file *file, int flags)
3862 if (flags & (1 << 0))
3863 fputs_filtered ("IVO ", file);
3864 if (flags & (1 << 1))
3865 fputs_filtered ("DVZ ", file);
3866 if (flags & (1 << 2))
3867 fputs_filtered ("OFL ", file);
3868 if (flags & (1 << 3))
3869 fputs_filtered ("UFL ", file);
3870 if (flags & (1 << 4))
3871 fputs_filtered ("INX ", file);
3872 fputc_filtered ('\n', file);
3875 /* Print interesting information about the floating point processor
3876 (if present) or emulator. */
3877 static void
3878 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3879 struct frame_info *frame, const char *args)
3881 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3882 int type;
3884 type = (status >> 24) & 127;
3885 if (status & (1 << 31))
3886 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3887 else
3888 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3889 /* i18n: [floating point unit] mask */
3890 fputs_filtered (_("mask: "), file);
3891 print_fpu_flags (file, status >> 16);
3892 /* i18n: [floating point unit] flags */
3893 fputs_filtered (_("flags: "), file);
3894 print_fpu_flags (file, status);
3897 /* Construct the ARM extended floating point type. */
3898 static struct type *
3899 arm_ext_type (struct gdbarch *gdbarch)
3901 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3903 if (!tdep->arm_ext_type)
3904 tdep->arm_ext_type
3905 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3906 floatformats_arm_ext);
3908 return tdep->arm_ext_type;
3911 static struct type *
3912 arm_neon_double_type (struct gdbarch *gdbarch)
3914 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3916 if (tdep->neon_double_type == NULL)
3918 struct type *t, *elem;
3920 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3921 TYPE_CODE_UNION);
3922 elem = builtin_type (gdbarch)->builtin_uint8;
3923 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3924 elem = builtin_type (gdbarch)->builtin_uint16;
3925 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3926 elem = builtin_type (gdbarch)->builtin_uint32;
3927 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3928 elem = builtin_type (gdbarch)->builtin_uint64;
3929 append_composite_type_field (t, "u64", elem);
3930 elem = builtin_type (gdbarch)->builtin_float;
3931 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3932 elem = builtin_type (gdbarch)->builtin_double;
3933 append_composite_type_field (t, "f64", elem);
3935 TYPE_VECTOR (t) = 1;
3936 TYPE_NAME (t) = "neon_d";
3937 tdep->neon_double_type = t;
3940 return tdep->neon_double_type;
3943 /* FIXME: The vector types are not correctly ordered on big-endian
3944 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3945 bits of d0 - regardless of what unit size is being held in d0. So
3946 the offset of the first uint8 in d0 is 7, but the offset of the
3947 first float is 4. This code works as-is for little-endian
3948 targets. */
3950 static struct type *
3951 arm_neon_quad_type (struct gdbarch *gdbarch)
3953 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3955 if (tdep->neon_quad_type == NULL)
3957 struct type *t, *elem;
3959 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3960 TYPE_CODE_UNION);
3961 elem = builtin_type (gdbarch)->builtin_uint8;
3962 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3963 elem = builtin_type (gdbarch)->builtin_uint16;
3964 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3965 elem = builtin_type (gdbarch)->builtin_uint32;
3966 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3967 elem = builtin_type (gdbarch)->builtin_uint64;
3968 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3969 elem = builtin_type (gdbarch)->builtin_float;
3970 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3971 elem = builtin_type (gdbarch)->builtin_double;
3972 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3974 TYPE_VECTOR (t) = 1;
3975 TYPE_NAME (t) = "neon_q";
3976 tdep->neon_quad_type = t;
3979 return tdep->neon_quad_type;
3982 /* Return the GDB type object for the "standard" data type of data in
3983 register N. */
3985 static struct type *
3986 arm_register_type (struct gdbarch *gdbarch, int regnum)
3988 int num_regs = gdbarch_num_regs (gdbarch);
3990 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3991 && regnum >= num_regs && regnum < num_regs + 32)
3992 return builtin_type (gdbarch)->builtin_float;
3994 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3995 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3996 return arm_neon_quad_type (gdbarch);
3998 /* If the target description has register information, we are only
3999 in this function so that we can override the types of
4000 double-precision registers for NEON. */
4001 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4003 struct type *t = tdesc_register_type (gdbarch, regnum);
4005 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4006 && TYPE_CODE (t) == TYPE_CODE_FLT
4007 && gdbarch_tdep (gdbarch)->have_neon)
4008 return arm_neon_double_type (gdbarch);
4009 else
4010 return t;
4013 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4015 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4016 return builtin_type (gdbarch)->builtin_void;
4018 return arm_ext_type (gdbarch);
4020 else if (regnum == ARM_SP_REGNUM)
4021 return builtin_type (gdbarch)->builtin_data_ptr;
4022 else if (regnum == ARM_PC_REGNUM)
4023 return builtin_type (gdbarch)->builtin_func_ptr;
4024 else if (regnum >= ARRAY_SIZE (arm_register_names))
4025 /* These registers are only supported on targets which supply
4026 an XML description. */
4027 return builtin_type (gdbarch)->builtin_int0;
4028 else
4029 return builtin_type (gdbarch)->builtin_uint32;
4032 /* Map a DWARF register REGNUM onto the appropriate GDB register
4033 number. */
4035 static int
4036 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4038 /* Core integer regs. */
4039 if (reg >= 0 && reg <= 15)
4040 return reg;
4042 /* Legacy FPA encoding. These were once used in a way which
4043 overlapped with VFP register numbering, so their use is
4044 discouraged, but GDB doesn't support the ARM toolchain
4045 which used them for VFP. */
4046 if (reg >= 16 && reg <= 23)
4047 return ARM_F0_REGNUM + reg - 16;
4049 /* New assignments for the FPA registers. */
4050 if (reg >= 96 && reg <= 103)
4051 return ARM_F0_REGNUM + reg - 96;
4053 /* WMMX register assignments. */
4054 if (reg >= 104 && reg <= 111)
4055 return ARM_WCGR0_REGNUM + reg - 104;
4057 if (reg >= 112 && reg <= 127)
4058 return ARM_WR0_REGNUM + reg - 112;
4060 if (reg >= 192 && reg <= 199)
4061 return ARM_WC0_REGNUM + reg - 192;
4063 /* VFP v2 registers. A double precision value is actually
4064 in d1 rather than s2, but the ABI only defines numbering
4065 for the single precision registers. This will "just work"
4066 in GDB for little endian targets (we'll read eight bytes,
4067 starting in s0 and then progressing to s1), but will be
4068 reversed on big endian targets with VFP. This won't
4069 be a problem for the new Neon quad registers; you're supposed
4070 to use DW_OP_piece for those. */
4071 if (reg >= 64 && reg <= 95)
4073 char name_buf[4];
4075 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4076 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4077 strlen (name_buf));
4080 /* VFP v3 / Neon registers. This range is also used for VFP v2
4081 registers, except that it now describes d0 instead of s0. */
4082 if (reg >= 256 && reg <= 287)
4084 char name_buf[4];
4086 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4087 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4088 strlen (name_buf));
4091 return -1;
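  /* Illustrative note (not in the original source): with the mapping
     above, DWARF register 2 stays r2, 66 resolves to "s2", 260 to "d4",
     and anything outside the listed ranges falls through to -1.  */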
4094 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4095 static int
4096 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4098 int reg = regnum;
4099 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4101 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4102 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4104 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4105 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4107 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4108 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4110 if (reg < NUM_GREGS)
4111 return SIM_ARM_R0_REGNUM + reg;
4112 reg -= NUM_GREGS;
4114 if (reg < NUM_FREGS)
4115 return SIM_ARM_FP0_REGNUM + reg;
4116 reg -= NUM_FREGS;
4118 if (reg < NUM_SREGS)
4119 return SIM_ARM_FPS_REGNUM + reg;
4120 reg -= NUM_SREGS;
4122 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4125 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4126 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4127 NULL if an error occurs. BUF is freed. */
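/* A worked example (illustrative, not in the original source): with BUF
   holding 4 bytes that end at 0x1000 (addresses 0xffc-0xfff) and NEW_LEN
   of 16, the new buffer spans 0xff0-0xfff; the 12 missing bytes are read
   from 0xff0 and the original 4 bytes keep their place at the end.  */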
4129 static gdb_byte *
4130 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4131 int old_len, int new_len)
4133 gdb_byte *new_buf;
4134 int bytes_to_read = new_len - old_len;
4136 new_buf = (gdb_byte *) xmalloc (new_len);
4137 memcpy (new_buf + bytes_to_read, buf, old_len);
4138 xfree (buf);
4139 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4141 xfree (new_buf);
4142 return NULL;
4144 return new_buf;
4147 /* An IT block is at most the 2-byte IT instruction followed by
4148 four 4-byte instructions. The furthest back we must search to
4149 find an IT block that affects the current instruction is thus
4150 2 + 3 * 4 == 14 bytes. */
4151 #define MAX_IT_BLOCK_PREFIX 14
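/* Illustrative worst case (not in the original source): an IT insn at
   BPADDR - 14 conditioning four insns, the first three of which are
   32-bit, leaves the fourth conditional insn exactly at BPADDR.  */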
4153 /* Use a quick scan if there are more than this many bytes of
4154 code. */
4155 #define IT_SCAN_THRESHOLD 32
4157 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4158 A breakpoint in an IT block may not be hit, depending on the
4159 condition flags. */
4160 static CORE_ADDR
4161 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4163 gdb_byte *buf;
4164 char map_type;
4165 CORE_ADDR boundary, func_start;
4166 int buf_len;
4167 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4168 int i, any, last_it, last_it_count;
4170 /* If we are using BKPT breakpoints, none of this is necessary. */
4171 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4172 return bpaddr;
4174 /* ARM mode does not have this problem. */
4175 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4176 return bpaddr;
4178 /* We are setting a breakpoint in Thumb code that could potentially
4179 contain an IT block. The first step is to find how much Thumb
4180 code there is; we do not need to read outside of known Thumb
4181 sequences. */
4182 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4183 if (map_type == 0)
4184 /* Thumb-2 code must have mapping symbols to have a chance. */
4185 return bpaddr;
4187 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4189 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4190 && func_start > boundary)
4191 boundary = func_start;
4193 /* Search for a candidate IT instruction. We have to do some fancy
4194 footwork to distinguish a real IT instruction from the second
4195 half of a 32-bit instruction, but there is no need for that if
4196 there's no candidate. */
4197 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4198 if (buf_len == 0)
4199 /* No room for an IT instruction. */
4200 return bpaddr;
4202 buf = (gdb_byte *) xmalloc (buf_len);
4203 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4204 return bpaddr;
4205 any = 0;
4206 for (i = 0; i < buf_len; i += 2)
4208 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4209 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4211 any = 1;
4212 break;
4216 if (any == 0)
4218 xfree (buf);
4219 return bpaddr;
4222 /* OK, the code bytes before this instruction contain at least one
4223 halfword which resembles an IT instruction. We know that it's
4224 Thumb code, but there are still two possibilities. Either the
4225 halfword really is an IT instruction, or it is the second half of
4226 a 32-bit Thumb instruction. The only way we can tell is to
4227 scan forwards from a known instruction boundary. */
4228 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4230 int definite;
4232 /* There's a lot of code before this instruction. Start with an
4233 optimistic search; it's easy to recognize halfwords that can
4234 not be the start of a 32-bit instruction, and use that to
4235 lock on to the instruction boundaries. */
4236 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4237 if (buf == NULL)
4238 return bpaddr;
4239 buf_len = IT_SCAN_THRESHOLD;
4241 definite = 0;
4242 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4244 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4245 if (thumb_insn_size (inst1) == 2)
4247 definite = 1;
4248 break;
4252 /* At this point, if DEFINITE, BUF[I] is the first place we
4253 are sure that we know the instruction boundaries, and it is far
4254 enough from BPADDR that we could not miss an IT instruction
4255 affecting BPADDR. If ! DEFINITE, give up - start from a
4256 known boundary. */
4257 if (! definite)
4259 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4260 bpaddr - boundary);
4261 if (buf == NULL)
4262 return bpaddr;
4263 buf_len = bpaddr - boundary;
4264 i = 0;
4267 else
4269 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4270 if (buf == NULL)
4271 return bpaddr;
4272 buf_len = bpaddr - boundary;
4273 i = 0;
4276 /* Scan forwards. Find the last IT instruction before BPADDR. */
4277 last_it = -1;
4278 last_it_count = 0;
4279 while (i < buf_len)
4281 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4282 last_it_count--;
4283 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4285 last_it = i;
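	  /* Note added for clarity (not in the original source): the
	     lowest set bit of the IT mask in bits 0-3 encodes the block
	     length, so mask xxx1 conditions four following insns, xx10
	     three, x100 two and 1000 one, which is what the cascade
	     below checks.  */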
4286 if (inst1 & 0x0001)
4287 last_it_count = 4;
4288 else if (inst1 & 0x0002)
4289 last_it_count = 3;
4290 else if (inst1 & 0x0004)
4291 last_it_count = 2;
4292 else
4293 last_it_count = 1;
4295 i += thumb_insn_size (inst1);
4298 xfree (buf);
4300 if (last_it == -1)
4301 /* There wasn't really an IT instruction after all. */
4302 return bpaddr;
4304 if (last_it_count < 1)
4305 /* It was too far away. */
4306 return bpaddr;
4308 /* This really is a trouble spot. Move the breakpoint to the IT
4309 instruction. */
4310 return bpaddr - buf_len + last_it;
4313 /* ARM displaced stepping support.
4315 Generally ARM displaced stepping works as follows:
4317 1. When an instruction is to be single-stepped, it is first decoded by
4318 arm_process_displaced_insn. Depending on the type of instruction, it is
4319 then copied to a scratch location, possibly in a modified form. The
4320 copy_* set of functions performs such modification, as necessary. A
4321 breakpoint is placed after the modified instruction in the scratch space
4322 to return control to GDB. Note in particular that instructions which
4323 modify the PC will no longer do so after modification.
4325 2. The instruction is single-stepped, by setting the PC to the scratch
4326 location address, and resuming. Control returns to GDB when the
4327 breakpoint is hit.
4329 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4330 function used for the current instruction. This function's job is to
4331 put the CPU/memory state back to what it would have been if the
4332 instruction had been executed unmodified in its original location. */
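/* A minimal illustration (added, not part of the original comment): for
   an ARM "b <label>", arm_copy_b_bl_blx below records the condition and
   destination in the closure and copies a NOP to the scratch space;
   cleanup_branch then performs the actual branch by writing the
   destination to the PC once control has returned to GDB.  */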
4334 /* NOP instruction (mov r0, r0). */
4335 #define ARM_NOP 0xe1a00000
4336 #define THUMB_NOP 0x4600
4338 /* Helper for register reads for displaced stepping. In particular, this
4339 returns the PC as it would be seen by the instruction at its original
4340 location. */
4342 ULONGEST
4343 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4344 int regno)
4346 ULONGEST ret;
4347 CORE_ADDR from = dsc->insn_addr;
4349 if (regno == ARM_PC_REGNUM)
4351 /* Compute pipeline offset:
4352 - When executing an ARM instruction, PC reads as the address of the
4353 current instruction plus 8.
4354 - When executing a Thumb instruction, PC reads as the address of the
4355 current instruction plus 4. */
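      /* For instance (illustrative, not in the original source): an ARM
	 insn at 0x8000 that reads the PC sees 0x8008, while a Thumb insn
	 at the same address sees 0x8004.  */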
4357 if (!dsc->is_thumb)
4358 from += 8;
4359 else
4360 from += 4;
4362 if (debug_displaced)
4363 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4364 (unsigned long) from);
4365 return (ULONGEST) from;
4367 else
4369 regcache_cooked_read_unsigned (regs, regno, &ret);
4370 if (debug_displaced)
4371 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4372 regno, (unsigned long) ret);
4373 return ret;
4377 static int
4378 displaced_in_arm_mode (struct regcache *regs)
4380 ULONGEST ps;
4381 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4383 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4385 return (ps & t_bit) == 0;
4388 /* Write to the PC as from a branch instruction. */
4390 static void
4391 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4392 ULONGEST val)
4394 if (!dsc->is_thumb)
4395 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4396 architecture versions < 6. */
4397 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4398 val & ~(ULONGEST) 0x3);
4399 else
4400 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4401 val & ~(ULONGEST) 0x1);
4404 /* Write to the PC as from a branch-exchange instruction. */
4406 static void
4407 bx_write_pc (struct regcache *regs, ULONGEST val)
4409 ULONGEST ps;
4410 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4412 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4414 if ((val & 1) == 1)
4416 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4417 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4419 else if ((val & 2) == 0)
4421 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4422 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4424 else
4426 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4427 mode, align dest to 4 bytes). */
4428 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4429 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4430 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4434 /* Write to the PC as if from a load instruction. */
4436 static void
4437 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4438 ULONGEST val)
4440 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4441 bx_write_pc (regs, val);
4442 else
4443 branch_write_pc (regs, dsc, val);
4446 /* Write to the PC as if from an ALU instruction. */
4448 static void
4449 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4450 ULONGEST val)
4452 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4453 bx_write_pc (regs, val);
4454 else
4455 branch_write_pc (regs, dsc, val);
4458 /* Helper for writing to registers for displaced stepping. Writing to the PC
4459 has varying effects depending on the instruction which does the write:
4460 this is controlled by the WRITE_PC argument. */
4462 void
4463 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4464 int regno, ULONGEST val, enum pc_write_style write_pc)
4466 if (regno == ARM_PC_REGNUM)
4468 if (debug_displaced)
4469 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4470 (unsigned long) val);
4471 switch (write_pc)
4473 case BRANCH_WRITE_PC:
4474 branch_write_pc (regs, dsc, val);
4475 break;
4477 case BX_WRITE_PC:
4478 bx_write_pc (regs, val);
4479 break;
4481 case LOAD_WRITE_PC:
4482 load_write_pc (regs, dsc, val);
4483 break;
4485 case ALU_WRITE_PC:
4486 alu_write_pc (regs, dsc, val);
4487 break;
4489 case CANNOT_WRITE_PC:
4490 warning (_("Instruction wrote to PC in an unexpected way when "
4491 "single-stepping"));
4492 break;
4494 default:
4495 internal_error (__FILE__, __LINE__,
4496 _("Invalid argument to displaced_write_reg"));
4499 dsc->wrote_to_pc = 1;
4501 else
4503 if (debug_displaced)
4504 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4505 regno, (unsigned long) val);
4506 regcache_cooked_write_unsigned (regs, regno, val);
4510 /* This function is used to concisely determine if an instruction INSN
4511 references PC. Register fields of interest in INSN should have the
4512 corresponding fields of BITMASK set to 0b1111. The function
4513 returns 1 if any of these fields in INSN reference the PC
4514 (also 0b1111, r15), else it returns 0. */
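/* For example (illustrative note, not in the original source),
   arm_copy_preload below calls insn_references_pc (insn, 0x000f0000ul)
   to test whether the Rn field in bits 16-19 of INSN is the PC.  */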
4516 static int
4517 insn_references_pc (uint32_t insn, uint32_t bitmask)
4519 uint32_t lowbit = 1;
4521 while (bitmask != 0)
4523 uint32_t mask;
4525 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4528 if (!lowbit)
4529 break;
4531 mask = lowbit * 0xf;
4533 if ((insn & mask) == mask)
4534 return 1;
4536 bitmask &= ~mask;
4539 return 0;
4542 /* The simplest copy function. Many instructions have the same effect no
4543 matter what address they are executed at: in those cases, use this. */
4545 static int
4546 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4547 const char *iname, arm_displaced_step_closure *dsc)
4549 if (debug_displaced)
4550 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4551 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4552 iname);
4554 dsc->modinsn[0] = insn;
4556 return 0;
4559 static int
4560 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4561 uint16_t insn2, const char *iname,
4562 arm_displaced_step_closure *dsc)
4564 if (debug_displaced)
4565 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4566 "opcode/class '%s' unmodified\n", insn1, insn2,
4567 iname);
4569 dsc->modinsn[0] = insn1;
4570 dsc->modinsn[1] = insn2;
4571 dsc->numinsns = 2;
4573 return 0;
4576 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
4577 modification. */
4578 static int
4579 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4580 const char *iname,
4581 arm_displaced_step_closure *dsc)
4583 if (debug_displaced)
4584 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4585 "opcode/class '%s' unmodified\n", insn,
4586 iname);
4588 dsc->modinsn[0] = insn;
4590 return 0;
4593 /* Preload instructions with immediate offset. */
4595 static void
4596 cleanup_preload (struct gdbarch *gdbarch,
4597 struct regcache *regs, arm_displaced_step_closure *dsc)
4599 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4600 if (!dsc->u.preload.immed)
4601 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4604 static void
4605 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4606 arm_displaced_step_closure *dsc, unsigned int rn)
4608 ULONGEST rn_val;
4609 /* Preload instructions:
4611 {pli/pld} [rn, #+/-imm]
4613 {pli/pld} [r0, #+/-imm]. */
4615 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4616 rn_val = displaced_read_reg (regs, dsc, rn);
4617 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4618 dsc->u.preload.immed = 1;
4620 dsc->cleanup = &cleanup_preload;
4623 static int
4624 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4625 arm_displaced_step_closure *dsc)
4627 unsigned int rn = bits (insn, 16, 19);
4629 if (!insn_references_pc (insn, 0x000f0000ul))
4630 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4632 if (debug_displaced)
4633 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4634 (unsigned long) insn);
4636 dsc->modinsn[0] = insn & 0xfff0ffff;
4638 install_preload (gdbarch, regs, dsc, rn);
4640 return 0;
4643 static int
4644 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4645 struct regcache *regs, arm_displaced_step_closure *dsc)
4647 unsigned int rn = bits (insn1, 0, 3);
4648 unsigned int u_bit = bit (insn1, 7);
4649 int imm12 = bits (insn2, 0, 11);
4650 ULONGEST pc_val;
4652 if (rn != ARM_PC_REGNUM)
4653 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4655 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
4656 PLD (literal) Encoding T1. */
4657 if (debug_displaced)
4658 fprintf_unfiltered (gdb_stdlog,
4659 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4660 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4661 imm12);
4663 if (!u_bit)
4664 imm12 = -1 * imm12;
4666 /* Rewrite instruction {pli/pld} PC imm12 into:
4667 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4669 {pli/pld} [r0, r1]
4671 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4673 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4674 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4676 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4678 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4679 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4680 dsc->u.preload.immed = 0;
4682 /* {pli/pld} [r0, r1] */
4683 dsc->modinsn[0] = insn1 & 0xfff0;
4684 dsc->modinsn[1] = 0xf001;
4685 dsc->numinsns = 2;
4687 dsc->cleanup = &cleanup_preload;
4688 return 0;
4691 /* Preload instructions with register offset. */
4693 static void
4694 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4695 arm_displaced_step_closure *dsc, unsigned int rn,
4696 unsigned int rm)
4698 ULONGEST rn_val, rm_val;
4700 /* Preload register-offset instructions:
4702 {pli/pld} [rn, rm {, shift}]
4704 {pli/pld} [r0, r1 {, shift}]. */
4706 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4707 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4708 rn_val = displaced_read_reg (regs, dsc, rn);
4709 rm_val = displaced_read_reg (regs, dsc, rm);
4710 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4711 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4712 dsc->u.preload.immed = 0;
4714 dsc->cleanup = &cleanup_preload;
4717 static int
4718 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4719 struct regcache *regs,
4720 arm_displaced_step_closure *dsc)
4722 unsigned int rn = bits (insn, 16, 19);
4723 unsigned int rm = bits (insn, 0, 3);
4726 if (!insn_references_pc (insn, 0x000f000ful))
4727 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4729 if (debug_displaced)
4730 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4731 (unsigned long) insn);
4733 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4735 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4736 return 0;
4739 /* Copy/cleanup coprocessor load and store instructions. */
4741 static void
4742 cleanup_copro_load_store (struct gdbarch *gdbarch,
4743 struct regcache *regs,
4744 arm_displaced_step_closure *dsc)
4746 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4748 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4750 if (dsc->u.ldst.writeback)
4751 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4754 static void
4755 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4756 arm_displaced_step_closure *dsc,
4757 int writeback, unsigned int rn)
4759 ULONGEST rn_val;
4761 /* Coprocessor load/store instructions:
4763 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4765 {stc/stc2} [r0, #+/-imm].
4767 ldc/ldc2 are handled identically. */
4769 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4770 rn_val = displaced_read_reg (regs, dsc, rn);
4771 /* PC should be 4-byte aligned. */
4772 rn_val = rn_val & 0xfffffffc;
4773 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4775 dsc->u.ldst.writeback = writeback;
4776 dsc->u.ldst.rn = rn;
4778 dsc->cleanup = &cleanup_copro_load_store;
4781 static int
4782 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4783 struct regcache *regs,
4784 arm_displaced_step_closure *dsc)
4786 unsigned int rn = bits (insn, 16, 19);
4788 if (!insn_references_pc (insn, 0x000f0000ul))
4789 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4791 if (debug_displaced)
4792 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4793 "load/store insn %.8lx\n", (unsigned long) insn);
4795 dsc->modinsn[0] = insn & 0xfff0ffff;
4797 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4799 return 0;
4802 static int
4803 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4804 uint16_t insn2, struct regcache *regs,
4805 arm_displaced_step_closure *dsc)
4807 unsigned int rn = bits (insn1, 0, 3);
4809 if (rn != ARM_PC_REGNUM)
4810 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4811 "copro load/store", dsc);
4813 if (debug_displaced)
4814 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4815 "load/store insn %.4x%.4x\n", insn1, insn2);
4817 dsc->modinsn[0] = insn1 & 0xfff0;
4818 dsc->modinsn[1] = insn2;
4819 dsc->numinsns = 2;
4821 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4822 don't support writeback, so pass 0. */
4823 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4825 return 0;
4828 /* Clean up branch instructions (actually perform the branch, by setting
4829 PC). */
4831 static void
4832 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4833 arm_displaced_step_closure *dsc)
4835 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4836 int branch_taken = condition_true (dsc->u.branch.cond, status);
4837 enum pc_write_style write_pc = dsc->u.branch.exchange
4838 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4840 if (!branch_taken)
4841 return;
4843 if (dsc->u.branch.link)
4845 /* LR should hold the address of the insn following the current one. To
4846 avoid confusing logic that later handles `bx lr', set bit 0 of the LR
4847 value if the current insn mode is Thumb. */
4848 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4850 if (dsc->is_thumb)
4851 next_insn_addr |= 0x1;
4853 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4854 CANNOT_WRITE_PC);
4857 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4860 /* Copy B/BL/BLX instructions with immediate destinations. */
4862 static void
4863 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4864 arm_displaced_step_closure *dsc,
4865 unsigned int cond, int exchange, int link, long offset)
4867 /* Implement "BL<cond> <label>" as:
4869 Preparation: cond <- instruction condition
4870 Insn: mov r0, r0 (nop)
4871 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4873 B<cond> similar, but don't set r14 in cleanup. */
4875 dsc->u.branch.cond = cond;
4876 dsc->u.branch.link = link;
4877 dsc->u.branch.exchange = exchange;
4879 dsc->u.branch.dest = dsc->insn_addr;
4880 if (link && exchange)
4881 /* For BLX, offset is computed from the Align (PC, 4). */
4882 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4884 if (dsc->is_thumb)
4885 dsc->u.branch.dest += 4 + offset;
4886 else
4887 dsc->u.branch.dest += 8 + offset;
4889 dsc->cleanup = &cleanup_branch;
4891 static int
4892 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4893 struct regcache *regs, arm_displaced_step_closure *dsc)
4895 unsigned int cond = bits (insn, 28, 31);
4896 int exchange = (cond == 0xf);
4897 int link = exchange || bit (insn, 24);
4898 long offset;
4900 if (debug_displaced)
4901 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4902 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4903 (unsigned long) insn);
4904 if (exchange)
4905 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4906 then arrange the switch into Thumb mode. */
4907 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4908 else
4909 offset = bits (insn, 0, 23) << 2;
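  /* Note added for clarity (not in the original source): the 24-bit
     immediate shifted left by two yields a 26-bit byte offset; the test
     below sign-extends it when bit 25 is set.  */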
4911 if (bit (offset, 25))
4912 offset = offset | ~0x3ffffff;
4914 dsc->modinsn[0] = ARM_NOP;
4916 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4917 return 0;
4920 static int
4921 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4922 uint16_t insn2, struct regcache *regs,
4923 arm_displaced_step_closure *dsc)
4925 int link = bit (insn2, 14);
4926 int exchange = link && !bit (insn2, 12);
4927 int cond = INST_AL;
4928 long offset = 0;
4929 int j1 = bit (insn2, 13);
4930 int j2 = bit (insn2, 11);
4931 int s = sbits (insn1, 10, 10);
4932 int i1 = !(j1 ^ bit (insn1, 10));
4933 int i2 = !(j2 ^ bit (insn1, 10));
4935 if (!link && !exchange) /* B */
4937 offset = (bits (insn2, 0, 10) << 1);
4938 if (bit (insn2, 12)) /* Encoding T4 */
4940 offset |= (bits (insn1, 0, 9) << 12)
4941 | (i2 << 22)
4942 | (i1 << 23)
4943 | (s << 24);
4944 cond = INST_AL;
4946 else /* Encoding T3 */
4948 offset |= (bits (insn1, 0, 5) << 12)
4949 | (j1 << 18)
4950 | (j2 << 19)
4951 | (s << 20);
4952 cond = bits (insn1, 6, 9);
4955 else
4957 offset = (bits (insn1, 0, 9) << 12);
4958 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
4959 offset |= exchange ?
4960 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
4963 if (debug_displaced)
4964 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
4965 "%.4x %.4x with offset %.8lx\n",
4966 link ? (exchange) ? "blx" : "bl" : "b",
4967 insn1, insn2, offset);
4969 dsc->modinsn[0] = THUMB_NOP;
4971 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4972 return 0;
4975 /* Copy B Thumb instructions. */
4976 static int
4977 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
4978 arm_displaced_step_closure *dsc)
4980 unsigned int cond = 0;
4981 int offset = 0;
4982 unsigned short bit_12_15 = bits (insn, 12, 15);
4983 CORE_ADDR from = dsc->insn_addr;
4985 if (bit_12_15 == 0xd)
4987 /* offset = SignExtend (imm8:0, 32) */
4988 offset = sbits ((insn << 1), 0, 8);
4989 cond = bits (insn, 8, 11);
4991 else if (bit_12_15 == 0xe) /* Encoding T2 */
4993 offset = sbits ((insn << 1), 0, 11);
4994 cond = INST_AL;
4997 if (debug_displaced)
4998 fprintf_unfiltered (gdb_stdlog,
4999 "displaced: copying b immediate insn %.4x "
5000 "with offset %d\n", insn, offset);
5002 dsc->u.branch.cond = cond;
5003 dsc->u.branch.link = 0;
5004 dsc->u.branch.exchange = 0;
5005 dsc->u.branch.dest = from + 4 + offset;
5007 dsc->modinsn[0] = THUMB_NOP;
5009 dsc->cleanup = &cleanup_branch;
5011 return 0;
5014 /* Copy BX/BLX with register-specified destinations. */
5016 static void
5017 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5018 arm_displaced_step_closure *dsc, int link,
5019 unsigned int cond, unsigned int rm)
5021 /* Implement {BX,BLX}<cond> <reg>" as:
5023 Preparation: cond <- instruction condition
5024 Insn: mov r0, r0 (nop)
5025 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5027 Don't set r14 in cleanup for BX. */
5029 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5031 dsc->u.branch.cond = cond;
5032 dsc->u.branch.link = link;
5034 dsc->u.branch.exchange = 1;
5036 dsc->cleanup = &cleanup_branch;
5039 static int
5040 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5041 struct regcache *regs, arm_displaced_step_closure *dsc)
5043 unsigned int cond = bits (insn, 28, 31);
5044 /* BX: x12xxx1x
5045 BLX: x12xxx3x. */
5046 int link = bit (insn, 5);
5047 unsigned int rm = bits (insn, 0, 3);
5049 if (debug_displaced)
5050 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5051 (unsigned long) insn);
5053 dsc->modinsn[0] = ARM_NOP;
5055 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5056 return 0;
5059 static int
5060 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5061 struct regcache *regs,
5062 arm_displaced_step_closure *dsc)
5064 int link = bit (insn, 7);
5065 unsigned int rm = bits (insn, 3, 6);
5067 if (debug_displaced)
5068 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5069 (unsigned short) insn);
5071 dsc->modinsn[0] = THUMB_NOP;
5073 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5075 return 0;
5079 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5081 static void
5082 cleanup_alu_imm (struct gdbarch *gdbarch,
5083 struct regcache *regs, arm_displaced_step_closure *dsc)
5085 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5086 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5087 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5088 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5091 static int
5092 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5093 arm_displaced_step_closure *dsc)
5095 unsigned int rn = bits (insn, 16, 19);
5096 unsigned int rd = bits (insn, 12, 15);
5097 unsigned int op = bits (insn, 21, 24);
5098 int is_mov = (op == 0xd);
5099 ULONGEST rd_val, rn_val;
5101 if (!insn_references_pc (insn, 0x000ff000ul))
5102 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5104 if (debug_displaced)
5105 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5106 "%.8lx\n", is_mov ? "move" : "ALU",
5107 (unsigned long) insn);
5109 /* Instruction is of form:
5111 <op><cond> rd, [rn,] #imm
5113 Rewrite as:
5115 Preparation: tmp1, tmp2 <- r0, r1;
5116 r0, r1 <- rd, rn
5117 Insn: <op><cond> r0, r1, #imm
5118 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5121 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5122 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5123 rn_val = displaced_read_reg (regs, dsc, rn);
5124 rd_val = displaced_read_reg (regs, dsc, rd);
5125 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5126 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5127 dsc->rd = rd;
5129 if (is_mov)
5130 dsc->modinsn[0] = insn & 0xfff00fff;
5131 else
5132 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5134 dsc->cleanup = &cleanup_alu_imm;
5136 return 0;
5139 static int
5140 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5141 uint16_t insn2, struct regcache *regs,
5142 arm_displaced_step_closure *dsc)
5144 unsigned int op = bits (insn1, 5, 8);
5145 unsigned int rn, rm, rd;
5146 ULONGEST rd_val, rn_val;
5148 rn = bits (insn1, 0, 3); /* Rn */
5149 rm = bits (insn2, 0, 3); /* Rm */
5150 rd = bits (insn2, 8, 11); /* Rd */
5152 /* This routine is only called for instruction MOV. */
5153 gdb_assert (op == 0x2 && rn == 0xf);
5155 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5156 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5158 if (debug_displaced)
5159 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5160 "ALU", insn1, insn2);
5162 /* Instruction is of form:
5164 <op><cond> rd, [rn,] #imm
5166 Rewrite as:
5168 Preparation: tmp1, tmp2 <- r0, r1;
5169 r0, r1 <- rd, rn
5170 Insn: <op><cond> r0, r1, #imm
5171 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5174 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5175 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5176 rn_val = displaced_read_reg (regs, dsc, rn);
5177 rd_val = displaced_read_reg (regs, dsc, rd);
5178 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5179 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5180 dsc->rd = rd;
5182 dsc->modinsn[0] = insn1;
5183 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5184 dsc->numinsns = 2;
5186 dsc->cleanup = &cleanup_alu_imm;
5188 return 0;
5191 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5193 static void
5194 cleanup_alu_reg (struct gdbarch *gdbarch,
5195 struct regcache *regs, arm_displaced_step_closure *dsc)
5197 ULONGEST rd_val;
5198 int i;
5200 rd_val = displaced_read_reg (regs, dsc, 0);
5202 for (i = 0; i < 3; i++)
5203 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5205 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5208 static void
5209 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5210 arm_displaced_step_closure *dsc,
5211 unsigned int rd, unsigned int rn, unsigned int rm)
5213 ULONGEST rd_val, rn_val, rm_val;
5215 /* Instruction is of form:
5217 <op><cond> rd, [rn,] rm [, <shift>]
5219 Rewrite as:
5221 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5222 r0, r1, r2 <- rd, rn, rm
5223 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5224 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5227 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5228 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5229 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5230 rd_val = displaced_read_reg (regs, dsc, rd);
5231 rn_val = displaced_read_reg (regs, dsc, rn);
5232 rm_val = displaced_read_reg (regs, dsc, rm);
5233 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5234 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5235 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5236 dsc->rd = rd;
5238 dsc->cleanup = &cleanup_alu_reg;
5241 static int
5242 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5243 arm_displaced_step_closure *dsc)
5245 unsigned int op = bits (insn, 21, 24);
5246 int is_mov = (op == 0xd);
5248 if (!insn_references_pc (insn, 0x000ff00ful))
5249 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5251 if (debug_displaced)
5252 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5253 is_mov ? "move" : "ALU", (unsigned long) insn);
5255 if (is_mov)
5256 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5257 else
5258 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5260 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5261 bits (insn, 0, 3));
5262 return 0;
5265 static int
5266 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5267 struct regcache *regs,
5268 arm_displaced_step_closure *dsc)
5270 unsigned rm, rd;
5272 rm = bits (insn, 3, 6);
5273 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5275 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5276 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5278 if (debug_displaced)
5279 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5280 (unsigned short) insn);
5282 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5284 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5286 return 0;
5289 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5291 static void
5292 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5293 struct regcache *regs,
5294 arm_displaced_step_closure *dsc)
5296 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5297 int i;
5299 for (i = 0; i < 4; i++)
5300 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5302 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5305 static void
5306 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5307 arm_displaced_step_closure *dsc,
5308 unsigned int rd, unsigned int rn, unsigned int rm,
5309 unsigned rs)
5311 int i;
5312 ULONGEST rd_val, rn_val, rm_val, rs_val;
5314 /* Instruction is of form:
5316 <op><cond> rd, [rn,] rm, <shift> rs
5318 Rewrite as:
5320 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5321 r0, r1, r2, r3 <- rd, rn, rm, rs
5322 Insn: <op><cond> r0, r1, r2, <shift> r3
5323 Cleanup: tmp5 <- r0
5324 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5325 rd <- tmp5
5328 for (i = 0; i < 4; i++)
5329 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5331 rd_val = displaced_read_reg (regs, dsc, rd);
5332 rn_val = displaced_read_reg (regs, dsc, rn);
5333 rm_val = displaced_read_reg (regs, dsc, rm);
5334 rs_val = displaced_read_reg (regs, dsc, rs);
5335 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5336 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5337 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5338 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5339 dsc->rd = rd;
5340 dsc->cleanup = &cleanup_alu_shifted_reg;
5343 static int
5344 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5345 struct regcache *regs,
5346 arm_displaced_step_closure *dsc)
5348 unsigned int op = bits (insn, 21, 24);
5349 int is_mov = (op == 0xd);
5350 unsigned int rd, rn, rm, rs;
5352 if (!insn_references_pc (insn, 0x000fff0ful))
5353 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5355 if (debug_displaced)
5356 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5357 "%.8lx\n", is_mov ? "move" : "ALU",
5358 (unsigned long) insn);
5360 rn = bits (insn, 16, 19);
5361 rm = bits (insn, 0, 3);
5362 rs = bits (insn, 8, 11);
5363 rd = bits (insn, 12, 15);
5365 if (is_mov)
5366 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5367 else
5368 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5370 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5372 return 0;
5375 /* Clean up load instructions. */
5377 static void
5378 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5379 arm_displaced_step_closure *dsc)
5381 ULONGEST rt_val, rt_val2 = 0, rn_val;
5383 rt_val = displaced_read_reg (regs, dsc, 0);
5384 if (dsc->u.ldst.xfersize == 8)
5385 rt_val2 = displaced_read_reg (regs, dsc, 1);
5386 rn_val = displaced_read_reg (regs, dsc, 2);
5388 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5389 if (dsc->u.ldst.xfersize > 4)
5390 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5391 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5392 if (!dsc->u.ldst.immed)
5393 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5395 /* Handle register writeback. */
5396 if (dsc->u.ldst.writeback)
5397 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5398 /* Put result in right place. */
5399 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5400 if (dsc->u.ldst.xfersize == 8)
5401 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5404 /* Clean up store instructions. */
5406 static void
5407 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5408 arm_displaced_step_closure *dsc)
5410 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5412 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5413 if (dsc->u.ldst.xfersize > 4)
5414 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5415 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5416 if (!dsc->u.ldst.immed)
5417 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5418 if (!dsc->u.ldst.restore_r4)
5419 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5421 /* Writeback. */
5422 if (dsc->u.ldst.writeback)
5423 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5426 /* Copy "extra" load/store instructions. These are halfword/doubleword
5427 transfers, which have a different encoding to byte/word transfers. */
5429 static int
5430 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5431 struct regcache *regs, arm_displaced_step_closure *dsc)
5433 unsigned int op1 = bits (insn, 20, 24);
5434 unsigned int op2 = bits (insn, 5, 6);
5435 unsigned int rt = bits (insn, 12, 15);
5436 unsigned int rn = bits (insn, 16, 19);
5437 unsigned int rm = bits (insn, 0, 3);
5438 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5439 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5440 int immed = (op1 & 0x4) != 0;
5441 int opcode;
5442 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5444 if (!insn_references_pc (insn, 0x000ff00ful))
5445 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5447 if (debug_displaced)
5448 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5449 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5450 (unsigned long) insn);
5452 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5454 if (opcode < 0)
5455 internal_error (__FILE__, __LINE__,
5456 _("copy_extra_ld_st: instruction decode error"));
5458 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5459 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5460 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5461 if (!immed)
5462 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5464 rt_val = displaced_read_reg (regs, dsc, rt);
5465 if (bytesize[opcode] == 8)
5466 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5467 rn_val = displaced_read_reg (regs, dsc, rn);
5468 if (!immed)
5469 rm_val = displaced_read_reg (regs, dsc, rm);
5471 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5472 if (bytesize[opcode] == 8)
5473 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5474 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5475 if (!immed)
5476 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5478 dsc->rd = rt;
5479 dsc->u.ldst.xfersize = bytesize[opcode];
5480 dsc->u.ldst.rn = rn;
5481 dsc->u.ldst.immed = immed;
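  /* Added note (an assumption from the ARM extra load/store encoding,
     not in the original source): bit 24 is the P bit and bit 21 the W
     bit, so writeback applies for post-indexed addressing (P == 0) or
     when W is set.  */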
5482 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5483 dsc->u.ldst.restore_r4 = 0;
5485 if (immed)
5486 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5488 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5489 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5490 else
5491 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5493 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5494 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5496 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5498 return 0;
5501 /* Copy byte/halfword/word loads and stores. */
5503 static void
5504 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5505 arm_displaced_step_closure *dsc, int load,
5506 int immed, int writeback, int size, int usermode,
5507 int rt, int rm, int rn)
5509 ULONGEST rt_val, rn_val, rm_val = 0;
5511 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5512 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5513 if (!immed)
5514 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5515 if (!load)
5516 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5518 rt_val = displaced_read_reg (regs, dsc, rt);
5519 rn_val = displaced_read_reg (regs, dsc, rn);
5520 if (!immed)
5521 rm_val = displaced_read_reg (regs, dsc, rm);
5523 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5524 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5525 if (!immed)
5526 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5527 dsc->rd = rt;
5528 dsc->u.ldst.xfersize = size;
5529 dsc->u.ldst.rn = rn;
5530 dsc->u.ldst.immed = immed;
5531 dsc->u.ldst.writeback = writeback;
5533 /* To write PC we can do:
5535 Before this sequence of instructions:
5536 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5537 r2 is the Rn value obtained from displaced_read_reg.
5539 Insn1: push {pc} Write address of STR instruction + offset on stack
5540 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5541 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5542 = addr(Insn1) + offset - addr(Insn3) - 8
5543 = offset - 16
5544 Insn4: add r4, r4, #8 r4 = offset - 8
5545 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5546 = from + offset
5547 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5549 Otherwise we don't know what value to write for PC, since the offset is
5550 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5551 of this can be found in Section "Saving from r15" in
5552 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5554 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5558 static int
5559 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5560 uint16_t insn2, struct regcache *regs,
5561 arm_displaced_step_closure *dsc, int size)
5563 unsigned int u_bit = bit (insn1, 7);
5564 unsigned int rt = bits (insn2, 12, 15);
5565 int imm12 = bits (insn2, 0, 11);
5566 ULONGEST pc_val;
5568 if (debug_displaced)
5569 fprintf_unfiltered (gdb_stdlog,
5570 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5571 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5572 imm12);
5574 if (!u_bit)
5575 imm12 = -1 * imm12;
5577 /* Rewrite instruction LDR Rt imm12 into:
5579 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5581 LDR R0, R2, R3,
5583 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5586 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5587 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5588 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5590 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5592 pc_val = pc_val & 0xfffffffc;
5594 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5595 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5597 dsc->rd = rt;
5599 dsc->u.ldst.xfersize = size;
5600 dsc->u.ldst.immed = 0;
5601 dsc->u.ldst.writeback = 0;
5602 dsc->u.ldst.restore_r4 = 0;
5604 /* LDR R0, R2, R3 */
5605 dsc->modinsn[0] = 0xf852;
5606 dsc->modinsn[1] = 0x3;
5607 dsc->numinsns = 2;
5609 dsc->cleanup = &cleanup_load;
5611 return 0;
5614 static int
5615 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5616 uint16_t insn2, struct regcache *regs,
5617 arm_displaced_step_closure *dsc,
5618 int writeback, int immed)
5620 unsigned int rt = bits (insn2, 12, 15);
5621 unsigned int rn = bits (insn1, 0, 3);
5622 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5623 /* In LDR (register), there is also a register Rm, which is not allowed to
5624 be PC, so we don't have to check it. */
5626 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5627 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5628 dsc);
5630 if (debug_displaced)
5631 fprintf_unfiltered (gdb_stdlog,
5632 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5633 rt, rn, insn1, insn2);
5635 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5636 0, rt, rm, rn);
5638 dsc->u.ldst.restore_r4 = 0;
5640 if (immed)
5641 /* ldr[b]<cond> rt, [rn, #imm], etc.
5643 ldr[b]<cond> r0, [r2, #imm]. */
5645 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5646 dsc->modinsn[1] = insn2 & 0x0fff;
5648 else
5649 /* ldr[b]<cond> rt, [rn, rm], etc.
5651 ldr[b]<cond> r0, [r2, r3]. */
5653 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5654 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5657 dsc->numinsns = 2;
5659 return 0;
5663 static int
5664 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5665 struct regcache *regs,
5666 arm_displaced_step_closure *dsc,
5667 int load, int size, int usermode)
5669 int immed = !bit (insn, 25);
5670 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5671 unsigned int rt = bits (insn, 12, 15);
5672 unsigned int rn = bits (insn, 16, 19);
5673 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5675 if (!insn_references_pc (insn, 0x000ff00ful))
5676 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5678 if (debug_displaced)
5679 fprintf_unfiltered (gdb_stdlog,
5680 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5681 load ? (size == 1 ? "ldrb" : "ldr")
5682 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5683 rt, rn,
5684 (unsigned long) insn);
5686 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5687 usermode, rt, rm, rn);
5689 if (load || rt != ARM_PC_REGNUM)
5691 dsc->u.ldst.restore_r4 = 0;
5693 if (immed)
5694 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5696 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5697 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5698 else
5699 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5701 {ldr,str}[b]<cond> r0, [r2, r3]. */
5702 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5704 else
5706 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5707 dsc->u.ldst.restore_r4 = 1;
5708 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5709 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5710 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5711 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5712 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5714 /* As above. */
5715 if (immed)
5716 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5717 else
5718 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5720 dsc->numinsns = 6;
5723 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5725 return 0;
5728 /* Cleanup LDM instructions with fully-populated register list. This is an
5729 unfortunate corner case: it's impossible to implement correctly by modifying
5730 the instruction. The issue is as follows: we have an instruction,
5732 ldm rN, {r0-r15}
5734 which we must rewrite to avoid loading PC. A possible solution would be to
5735 do the load in two halves, something like (with suitable cleanup
5736 afterwards):
5738 mov r8, rN
5739 ldm[id][ab] r8!, {r0-r7}
5740 str r7, <temp>
5741 ldm[id][ab] r8, {r7-r14}
5742 <bkpt>
5744 but at present there's no suitable place for <temp>, since the scratch space
5745 is overwritten before the cleanup routine is called. For now, we simply
5746 emulate the instruction. */
5748 static void
5749 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5750 arm_displaced_step_closure *dsc)
5752 int inc = dsc->u.block.increment;
5753 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5754 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5755 uint32_t regmask = dsc->u.block.regmask;
5756 int regno = inc ? 0 : 15;
5757 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5758 int exception_return = dsc->u.block.load && dsc->u.block.user
5759 && (regmask & 0x8000) != 0;
5760 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5761 int do_transfer = condition_true (dsc->u.block.cond, status);
5762 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5764 if (!do_transfer)
5765 return;
5767 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5768 sensible we can do here. Complain loudly. */
5769 if (exception_return)
5770 error (_("Cannot single-step exception return"));
5772 /* We don't handle any stores here for now. */
5773 gdb_assert (dsc->u.block.load != 0);
5775 if (debug_displaced)
5776 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5777 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5778 dsc->u.block.increment ? "inc" : "dec",
5779 dsc->u.block.before ? "before" : "after");
5781 while (regmask)
5783 uint32_t memword;
5785 if (inc)
5786 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5787 regno++;
5788 else
5789 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5790 regno--;
5792 xfer_addr += bump_before;
5794 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5795 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5797 xfer_addr += bump_after;
5799 regmask &= ~(1 << regno);
5802 if (dsc->u.block.writeback)
5803 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5804 CANNOT_WRITE_PC);
5807 /* Clean up an STM which included the PC in the register list. */
5809 static void
5810 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5811 arm_displaced_step_closure *dsc)
5813 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5814 int store_executed = condition_true (dsc->u.block.cond, status);
5815 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5816 CORE_ADDR stm_insn_addr;
5817 uint32_t pc_val;
5818 long offset;
5819 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5821 /* If condition code fails, there's nothing else to do. */
5822 if (!store_executed)
5823 return;
5825 if (dsc->u.block.increment)
5827 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5829 if (dsc->u.block.before)
5830 pc_stored_at += 4;
5832 else
5834 pc_stored_at = dsc->u.block.xfer_addr;
5836 if (dsc->u.block.before)
5837 pc_stored_at -= 4;
5840 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5841 stm_insn_addr = dsc->scratch_base;
5842 offset = pc_val - stm_insn_addr;
5844 if (debug_displaced)
5845 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5846 "STM instruction\n", offset);
5848 /* Rewrite the stored PC to the proper value for the non-displaced original
5849 instruction. */
5850 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5851 dsc->insn_addr + offset);
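/* An illustration with made-up addresses: if the original STM was at 0x8000
   and was executed out of line at scratch address 0x10000, the word stored
   for the PC will typically be 0x10008 or 0x1000c.  The code above recovers
   that offset (8 or 12) and rewrites the stored word to 0x8008 or 0x800c,
   i.e. what the non-displaced instruction would have stored.  */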
5854 /* Clean up an LDM which includes the PC in the register list. We clumped all
5855 the registers in the transferred list into a contiguous range r0...rX (to
5856 avoid loading PC directly and losing control of the debugged program), so we
5857 must undo that here. */
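/* A small worked example with hypothetical registers: an original
   "ldmia r6, {r1, r3, pc}" is executed out of line as "ldmia r6, {r0, r1, r2}".
   The shuffle below then moves r2 into the PC, r1 into r3 and r0 into r1,
   and the scratch registers that were only clobbered by the rewrite (here
   r0 and r2) are restored from dsc->tmp[].  */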
5859 static void
5860 cleanup_block_load_pc (struct gdbarch *gdbarch,
5861 struct regcache *regs,
5862 arm_displaced_step_closure *dsc)
5864 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5865 int load_executed = condition_true (dsc->u.block.cond, status);
5866 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5867 unsigned int regs_loaded = bitcount (mask);
5868 unsigned int num_to_shuffle = regs_loaded, clobbered;
5870 /* The method employed here will fail if the register list is fully populated
5871 (we need to avoid loading PC directly). */
5872 gdb_assert (num_to_shuffle < 16);
5874 if (!load_executed)
5875 return;
5877 clobbered = (1 << num_to_shuffle) - 1;
5879 while (num_to_shuffle > 0)
5881 if ((mask & (1 << write_reg)) != 0)
5883 unsigned int read_reg = num_to_shuffle - 1;
5885 if (read_reg != write_reg)
5887 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5888 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5889 if (debug_displaced)
5890 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5891 "loaded register r%d to r%d\n"), read_reg,
5892 write_reg);
5894 else if (debug_displaced)
5895 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5896 "r%d already in the right place\n"),
5897 write_reg);
5899 clobbered &= ~(1 << write_reg);
5901 num_to_shuffle--;
5904 write_reg--;
5907 /* Restore any registers we scribbled over. */
5908 for (write_reg = 0; clobbered != 0; write_reg++)
5910 if ((clobbered & (1 << write_reg)) != 0)
5912 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5913 CANNOT_WRITE_PC);
5914 if (debug_displaced)
5915 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5916 "clobbered register r%d\n"), write_reg);
5917 clobbered &= ~(1 << write_reg);
5921 /* Perform register writeback manually. */
5922 if (dsc->u.block.writeback)
5924 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5926 if (dsc->u.block.increment)
5927 new_rn_val += regs_loaded * 4;
5928 else
5929 new_rn_val -= regs_loaded * 4;
5931 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
5932 CANNOT_WRITE_PC);
5936 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5937 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5939 static int
5940 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
5941 struct regcache *regs,
5942 arm_displaced_step_closure *dsc)
5944 int load = bit (insn, 20);
5945 int user = bit (insn, 22);
5946 int increment = bit (insn, 23);
5947 int before = bit (insn, 24);
5948 int writeback = bit (insn, 21);
5949 int rn = bits (insn, 16, 19);
5951 /* Block transfers which don't mention PC can be run directly
5952 out-of-line. */
5953 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
5954 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
5956 if (rn == ARM_PC_REGNUM)
5958 warning (_("displaced: Unpredictable LDM or STM with "
5959 "base register r15"));
5960 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
5963 if (debug_displaced)
5964 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
5965 "%.8lx\n", (unsigned long) insn);
5967 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
5968 dsc->u.block.rn = rn;
5970 dsc->u.block.load = load;
5971 dsc->u.block.user = user;
5972 dsc->u.block.increment = increment;
5973 dsc->u.block.before = before;
5974 dsc->u.block.writeback = writeback;
5975 dsc->u.block.cond = bits (insn, 28, 31);
5977 dsc->u.block.regmask = insn & 0xffff;
5979 if (load)
5981 if ((insn & 0xffff) == 0xffff)
5983 /* LDM with a fully-populated register list. This case is
5984 particularly tricky. Implement for now by fully emulating the
5985 instruction (which might not behave perfectly in all cases, but
5986 these instructions should be rare enough for that not to matter
5987 too much). */
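/* Note that only a NOP is executed out of line here; the entire transfer
   is performed by cleanup_block_load_all once the NOP has been stepped.  */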
5988 dsc->modinsn[0] = ARM_NOP;
5990 dsc->cleanup = &cleanup_block_load_all;
5992 else
5994 /* LDM of a list of registers which includes PC. Implement by
5995 rewriting the list of registers to be transferred into a
5996 contiguous chunk r0...rX before doing the transfer, then shuffling
5997 registers into the correct places in the cleanup routine. */
5998 unsigned int regmask = insn & 0xffff;
5999 unsigned int num_in_list = bitcount (regmask), new_regmask;
6000 unsigned int i;
6002 for (i = 0; i < num_in_list; i++)
6003 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6005 /* Writeback makes things complicated. We need to avoid clobbering
6006 the base register with one of the registers in our modified
6007 register list, but just using a different register can't work in
6008 all cases, e.g.:
6010 ldm r14!, {r0-r13,pc}
6012 which would need to be rewritten as:
6014 ldm rN!, {r0-r14}
6016 but that can't work, because there's no free register for N.
6018 Solve this by turning off the writeback bit, and emulating
6019 writeback manually in the cleanup routine. */
6021 if (writeback)
6022 insn &= ~(1 << 21);
6024 new_regmask = (1 << num_in_list) - 1;
6026 if (debug_displaced)
6027 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6028 "{..., pc}: original reg list %.4x, modified "
6029 "list %.4x\n"), rn, writeback ? "!" : "",
6030 (int) insn & 0xffff, new_regmask);
6032 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6034 dsc->cleanup = &cleanup_block_load_pc;
6037 else
6039 /* STM of a list of registers which includes PC. Run the instruction
6040 as-is, but out of line: this will store the wrong value for the PC,
6041 so we must manually fix up the memory in the cleanup routine.
6042 Doing things this way has the advantage that we can auto-detect
6043 the offset of the PC write (which is architecture-dependent) in
6044 the cleanup routine. */
6045 dsc->modinsn[0] = insn;
6047 dsc->cleanup = &cleanup_block_store_pc;
6050 return 0;
6053 static int
6054 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6055 struct regcache *regs,
6056 arm_displaced_step_closure *dsc)
6058 int rn = bits (insn1, 0, 3);
6059 int load = bit (insn1, 4);
6060 int writeback = bit (insn1, 5);
6062 /* Block transfers which don't mention PC can be run directly
6063 out-of-line. */
6064 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6065 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6067 if (rn == ARM_PC_REGNUM)
6069 warning (_("displaced: Unpredictable LDM or STM with "
6070 "base register r15"));
6071 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6072 "unpredictable ldm/stm", dsc);
6075 if (debug_displaced)
6076 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6077 "%.4x%.4x\n", insn1, insn2);
6080 /* Clear bit 13, since it should always be zero. */
6080 dsc->u.block.regmask = (insn2 & 0xdfff);
6081 dsc->u.block.rn = rn;
6083 dsc->u.block.load = load;
6084 dsc->u.block.user = 0;
6085 dsc->u.block.increment = bit (insn1, 7);
6086 dsc->u.block.before = bit (insn1, 8);
6087 dsc->u.block.writeback = writeback;
6088 dsc->u.block.cond = INST_AL;
6089 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6091 if (load)
6093 if (dsc->u.block.regmask == 0xffff)
6095 /* This case cannot happen: bit 13 was cleared above, so the mask can never be 0xffff. */
6096 gdb_assert (0);
6098 else
6100 unsigned int regmask = dsc->u.block.regmask;
6101 unsigned int num_in_list = bitcount (regmask), new_regmask;
6102 unsigned int i;
6104 for (i = 0; i < num_in_list; i++)
6105 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6107 if (writeback)
6108 insn1 &= ~(1 << 5);
6110 new_regmask = (1 << num_in_list) - 1;
6112 if (debug_displaced)
6113 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6114 "{..., pc}: original reg list %.4x, modified "
6115 "list %.4x\n"), rn, writeback ? "!" : "",
6116 (int) dsc->u.block.regmask, new_regmask);
6118 dsc->modinsn[0] = insn1;
6119 dsc->modinsn[1] = (new_regmask & 0xffff);
6120 dsc->numinsns = 2;
6122 dsc->cleanup = &cleanup_block_load_pc;
6125 else
6127 dsc->modinsn[0] = insn1;
6128 dsc->modinsn[1] = insn2;
6129 dsc->numinsns = 2;
6130 dsc->cleanup = &cleanup_block_store_pc;
6132 return 0;
6135 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6136 This is used to avoid a dependency on BFD's bfd_endian enum. */
6138 ULONGEST
6139 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6140 int byte_order)
6142 return read_memory_unsigned_integer (memaddr, len,
6143 (enum bfd_endian) byte_order);
6146 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6148 CORE_ADDR
6149 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6150 CORE_ADDR val)
6152 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6155 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6157 static CORE_ADDR
6158 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6160 return 0;
6163 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6165 static int
6166 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6168 return arm_is_thumb (self->regcache);
6171 /* single_step() is called just before we want to resume the inferior,
6172 if we want to single-step it but there is no hardware or kernel
6173 single-step support. We find the targets of the coming instructions
6174 and set breakpoints on them. */
6176 std::vector<CORE_ADDR>
6177 arm_software_single_step (struct regcache *regcache)
6179 struct gdbarch *gdbarch = regcache->arch ();
6180 struct arm_get_next_pcs next_pcs_ctx;
6182 arm_get_next_pcs_ctor (&next_pcs_ctx,
6183 &arm_get_next_pcs_ops,
6184 gdbarch_byte_order (gdbarch),
6185 gdbarch_byte_order_for_code (gdbarch),
6187 regcache);
6189 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6191 for (CORE_ADDR &pc_ref : next_pcs)
6192 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6194 return next_pcs;
6197 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6198 for Linux, where some SVC instructions must be treated specially. */
6200 static void
6201 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6202 arm_displaced_step_closure *dsc)
6204 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6206 if (debug_displaced)
6207 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6208 "%.8lx\n", (unsigned long) resume_addr);
6210 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6214 /* Common copy routine for svc instruction. */
6216 static int
6217 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6218 arm_displaced_step_closure *dsc)
6220 /* Preparation: none.
6221 Insn: unmodified svc.
6222 Cleanup: pc <- insn_addr + insn_size. */
6224 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6225 instruction. */
6226 dsc->wrote_to_pc = 1;
6228 /* Allow OS-specific code to override SVC handling. */
6229 if (dsc->u.svc.copy_svc_os)
6230 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6231 else
6233 dsc->cleanup = &cleanup_svc;
6234 return 0;
6238 static int
6239 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6240 struct regcache *regs, arm_displaced_step_closure *dsc)
6243 if (debug_displaced)
6244 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6245 (unsigned long) insn);
6247 dsc->modinsn[0] = insn;
6249 return install_svc (gdbarch, regs, dsc);
6252 static int
6253 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6254 struct regcache *regs, arm_displaced_step_closure *dsc)
6257 if (debug_displaced)
6258 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6259 insn);
6261 dsc->modinsn[0] = insn;
6263 return install_svc (gdbarch, regs, dsc);
6266 /* Copy undefined instructions. */
6268 static int
6269 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6270 arm_displaced_step_closure *dsc)
6272 if (debug_displaced)
6273 fprintf_unfiltered (gdb_stdlog,
6274 "displaced: copying undefined insn %.8lx\n",
6275 (unsigned long) insn);
6277 dsc->modinsn[0] = insn;
6279 return 0;
6282 static int
6283 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6284 arm_displaced_step_closure *dsc)
6287 if (debug_displaced)
6288 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6289 "%.4x %.4x\n", (unsigned short) insn1,
6290 (unsigned short) insn2);
6292 dsc->modinsn[0] = insn1;
6293 dsc->modinsn[1] = insn2;
6294 dsc->numinsns = 2;
6296 return 0;
6299 /* Copy unpredictable instructions. */
6301 static int
6302 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6303 arm_displaced_step_closure *dsc)
6305 if (debug_displaced)
6306 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6307 "%.8lx\n", (unsigned long) insn);
6309 dsc->modinsn[0] = insn;
6311 return 0;
6314 /* The decode_* functions are instruction decoding helpers. They mostly follow
6315 the presentation in the ARM ARM. */
6317 static int
6318 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6319 struct regcache *regs,
6320 arm_displaced_step_closure *dsc)
6322 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6323 unsigned int rn = bits (insn, 16, 19);
6325 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6326 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6327 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6328 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6329 else if ((op1 & 0x60) == 0x20)
6330 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6331 else if ((op1 & 0x71) == 0x40)
6332 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6333 dsc);
6334 else if ((op1 & 0x77) == 0x41)
6335 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6336 else if ((op1 & 0x77) == 0x45)
6337 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6338 else if ((op1 & 0x77) == 0x51)
6340 if (rn != 0xf)
6341 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6342 else
6343 return arm_copy_unpred (gdbarch, insn, dsc);
6345 else if ((op1 & 0x77) == 0x55)
6346 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6347 else if (op1 == 0x57)
6348 switch (op2)
6350 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6351 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6352 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6353 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6354 default: return arm_copy_unpred (gdbarch, insn, dsc);
6356 else if ((op1 & 0x63) == 0x43)
6357 return arm_copy_unpred (gdbarch, insn, dsc);
6358 else if ((op2 & 0x1) == 0x0)
6359 switch (op1 & ~0x80)
6361 case 0x61:
6362 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6363 case 0x65:
6364 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6365 case 0x71: case 0x75:
6366 /* pld/pldw reg. */
6367 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6368 case 0x63: case 0x67: case 0x73: case 0x77:
6369 return arm_copy_unpred (gdbarch, insn, dsc);
6370 default:
6371 return arm_copy_undef (gdbarch, insn, dsc);
6373 else
6374 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6377 static int
6378 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6379 struct regcache *regs,
6380 arm_displaced_step_closure *dsc)
6382 if (bit (insn, 27) == 0)
6383 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6384 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6385 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6387 case 0x0: case 0x2:
6388 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6390 case 0x1: case 0x3:
6391 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6393 case 0x4: case 0x5: case 0x6: case 0x7:
6394 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6396 case 0x8:
6397 switch ((insn & 0xe00000) >> 21)
6399 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6400 /* stc/stc2. */
6401 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6403 case 0x2:
6404 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6406 default:
6407 return arm_copy_undef (gdbarch, insn, dsc);
6410 case 0x9:
6412 int rn_f = (bits (insn, 16, 19) == 0xf);
6413 switch ((insn & 0xe00000) >> 21)
6415 case 0x1: case 0x3:
6416 /* ldc/ldc2 imm (undefined for rn == pc). */
6417 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6418 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6420 case 0x2:
6421 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6423 case 0x4: case 0x5: case 0x6: case 0x7:
6424 /* ldc/ldc2 lit (undefined for rn != pc). */
6425 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6426 : arm_copy_undef (gdbarch, insn, dsc);
6428 default:
6429 return arm_copy_undef (gdbarch, insn, dsc);
6433 case 0xa:
6434 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6436 case 0xb:
6437 if (bits (insn, 16, 19) == 0xf)
6438 /* ldc/ldc2 lit. */
6439 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6440 else
6441 return arm_copy_undef (gdbarch, insn, dsc);
6443 case 0xc:
6444 if (bit (insn, 4))
6445 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6446 else
6447 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6449 case 0xd:
6450 if (bit (insn, 4))
6451 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6452 else
6453 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6455 default:
6456 return arm_copy_undef (gdbarch, insn, dsc);
6460 /* Decode miscellaneous instructions in dp/misc encoding space. */
6462 static int
6463 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6464 struct regcache *regs,
6465 arm_displaced_step_closure *dsc)
6467 unsigned int op2 = bits (insn, 4, 6);
6468 unsigned int op = bits (insn, 21, 22);
6470 switch (op2)
6472 case 0x0:
6473 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6475 case 0x1:
6476 if (op == 0x1) /* bx. */
6477 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6478 else if (op == 0x3)
6479 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6480 else
6481 return arm_copy_undef (gdbarch, insn, dsc);
6483 case 0x2:
6484 if (op == 0x1)
6485 /* Not really supported. */
6486 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6487 else
6488 return arm_copy_undef (gdbarch, insn, dsc);
6490 case 0x3:
6491 if (op == 0x1)
6492 return arm_copy_bx_blx_reg (gdbarch, insn,
6493 regs, dsc); /* blx register. */
6494 else
6495 return arm_copy_undef (gdbarch, insn, dsc);
6497 case 0x5:
6498 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6500 case 0x7:
6501 if (op == 0x1)
6502 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6503 else if (op == 0x3)
6504 /* Not really supported. */
6505 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6506 /* Fall through. */
6508 default:
6509 return arm_copy_undef (gdbarch, insn, dsc);
6513 static int
6514 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6515 struct regcache *regs,
6516 arm_displaced_step_closure *dsc)
6518 if (bit (insn, 25))
6519 switch (bits (insn, 20, 24))
6521 case 0x10:
6522 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6524 case 0x14:
6525 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6527 case 0x12: case 0x16:
6528 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6530 default:
6531 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6533 else
6535 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6537 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6538 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6539 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6540 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6541 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6542 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6543 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6544 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6545 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6546 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6547 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6548 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6549 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6550 /* The (op1 & 0x12) == 0x02 argument means "unprivileged". */
6551 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6552 dsc);
6555 /* Should be unreachable. */
6556 return 1;
6559 static int
6560 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6561 struct regcache *regs,
6562 arm_displaced_step_closure *dsc)
6564 int a = bit (insn, 25), b = bit (insn, 4);
6565 uint32_t op1 = bits (insn, 20, 24);
6567 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6568 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6569 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6570 else if ((!a && (op1 & 0x17) == 0x02)
6571 || (a && (op1 & 0x17) == 0x02 && !b))
6572 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6573 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6574 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6575 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6576 else if ((!a && (op1 & 0x17) == 0x03)
6577 || (a && (op1 & 0x17) == 0x03 && !b))
6578 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6579 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6580 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6581 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6582 else if ((!a && (op1 & 0x17) == 0x06)
6583 || (a && (op1 & 0x17) == 0x06 && !b))
6584 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6585 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6586 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6587 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6588 else if ((!a && (op1 & 0x17) == 0x07)
6589 || (a && (op1 & 0x17) == 0x07 && !b))
6590 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6592 /* Should be unreachable. */
6593 return 1;
6596 static int
6597 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6598 arm_displaced_step_closure *dsc)
6600 switch (bits (insn, 20, 24))
6602 case 0x00: case 0x01: case 0x02: case 0x03:
6603 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6605 case 0x04: case 0x05: case 0x06: case 0x07:
6606 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6608 case 0x08: case 0x09: case 0x0a: case 0x0b:
6609 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6610 return arm_copy_unmodified (gdbarch, insn,
6611 "decode/pack/unpack/saturate/reverse", dsc);
6613 case 0x18:
6614 if (bits (insn, 5, 7) == 0) /* op2. */
6616 if (bits (insn, 12, 15) == 0xf)
6617 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6618 else
6619 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6621 else
6622 return arm_copy_undef (gdbarch, insn, dsc);
6624 case 0x1a: case 0x1b:
6625 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6626 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6627 else
6628 return arm_copy_undef (gdbarch, insn, dsc);
6630 case 0x1c: case 0x1d:
6631 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6633 if (bits (insn, 0, 3) == 0xf)
6634 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6635 else
6636 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6638 else
6639 return arm_copy_undef (gdbarch, insn, dsc);
6641 case 0x1e: case 0x1f:
6642 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6643 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6644 else
6645 return arm_copy_undef (gdbarch, insn, dsc);
6648 /* Should be unreachable. */
6649 return 1;
6652 static int
6653 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6654 struct regcache *regs,
6655 arm_displaced_step_closure *dsc)
6657 if (bit (insn, 25))
6658 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6659 else
6660 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6663 static int
6664 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6665 struct regcache *regs,
6666 arm_displaced_step_closure *dsc)
6668 unsigned int opcode = bits (insn, 20, 24);
6670 switch (opcode)
6672 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6673 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6675 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6676 case 0x12: case 0x16:
6677 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6679 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6680 case 0x13: case 0x17:
6681 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6683 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6684 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6685 /* Note: no writeback for these instructions. Bit 25 will always be
6686 zero though (via caller), so the following works OK. */
6687 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6690 /* Should be unreachable. */
6691 return 1;
6694 /* Decode shifted register instructions. */
6696 static int
6697 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6698 uint16_t insn2, struct regcache *regs,
6699 arm_displaced_step_closure *dsc)
6701 /* PC is only allowed to be used in the MOV instruction. */
6703 unsigned int op = bits (insn1, 5, 8);
6704 unsigned int rn = bits (insn1, 0, 3);
6706 if (op == 0x2 && rn == 0xf) /* MOV */
6707 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6708 else
6709 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6710 "dp (shift reg)", dsc);
6714 /* Decode extension register load/store. Exactly the same as
6715 arm_decode_ext_reg_ld_st. */
6717 static int
6718 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6719 uint16_t insn2, struct regcache *regs,
6720 arm_displaced_step_closure *dsc)
6722 unsigned int opcode = bits (insn1, 4, 8);
6724 switch (opcode)
6726 case 0x04: case 0x05:
6727 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6728 "vfp/neon vmov", dsc);
6730 case 0x08: case 0x0c: /* 01x00 */
6731 case 0x0a: case 0x0e: /* 01x10 */
6732 case 0x12: case 0x16: /* 10x10 */
6733 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6734 "vfp/neon vstm/vpush", dsc);
6736 case 0x09: case 0x0d: /* 01x01 */
6737 case 0x0b: case 0x0f: /* 01x11 */
6738 case 0x13: case 0x17: /* 10x11 */
6739 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6740 "vfp/neon vldm/vpop", dsc);
6742 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6743 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6744 "vstr", dsc);
6745 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6746 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6749 /* Should be unreachable. */
6750 return 1;
6753 static int
6754 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6755 struct regcache *regs, arm_displaced_step_closure *dsc)
6757 unsigned int op1 = bits (insn, 20, 25);
6758 int op = bit (insn, 4);
6759 unsigned int coproc = bits (insn, 8, 11);
6761 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6762 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6763 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6764 && (coproc & 0xe) != 0xa)
6765 /* stc/stc2. */
6766 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6767 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6768 && (coproc & 0xe) != 0xa)
6769 /* ldc/ldc2 imm/lit. */
6770 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6771 else if ((op1 & 0x3e) == 0x00)
6772 return arm_copy_undef (gdbarch, insn, dsc);
6773 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6774 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6775 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6776 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6777 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6778 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6779 else if ((op1 & 0x30) == 0x20 && !op)
6781 if ((coproc & 0xe) == 0xa)
6782 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6783 else
6784 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6786 else if ((op1 & 0x30) == 0x20 && op)
6787 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6788 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6789 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6790 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6791 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6792 else if ((op1 & 0x30) == 0x30)
6793 return arm_copy_svc (gdbarch, insn, regs, dsc);
6794 else
6795 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6798 static int
6799 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6800 uint16_t insn2, struct regcache *regs,
6801 arm_displaced_step_closure *dsc)
6803 unsigned int coproc = bits (insn2, 8, 11);
6804 unsigned int bit_5_8 = bits (insn1, 5, 8);
6805 unsigned int bit_9 = bit (insn1, 9);
6806 unsigned int bit_4 = bit (insn1, 4);
6808 if (bit_9 == 0)
6810 if (bit_5_8 == 2)
6811 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6812 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6813 dsc);
6814 else if (bit_5_8 == 0) /* UNDEFINED. */
6815 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6816 else
6818 /* coproc is 101x. SIMD/VFP, ext registers load/store. */
6819 if ((coproc & 0xe) == 0xa)
6820 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6821 dsc);
6822 else /* coproc is not 101x. */
6824 if (bit_4 == 0) /* STC/STC2. */
6825 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6826 "stc/stc2", dsc);
6827 else /* LDC/LDC2 {literal, immediate}. */
6828 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6829 regs, dsc);
6833 else
6834 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6836 return 0;
6839 static void
6840 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6841 arm_displaced_step_closure *dsc, int rd)
6843 /* ADR Rd, #imm
6845 Rewrite as:
6847 Preparation: Rd <- PC
6848 Insn: ADD Rd, #imm
6849 Cleanup: Null.
6852 /* Rd <- PC */
6853 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6854 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6857 static int
6858 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6859 arm_displaced_step_closure *dsc,
6860 int rd, unsigned int imm)
6863 /* Encoding T2: ADDS Rd, #imm */
6864 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6866 install_pc_relative (gdbarch, regs, dsc, rd);
6868 return 0;
6871 static int
6872 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6873 struct regcache *regs,
6874 arm_displaced_step_closure *dsc)
6876 unsigned int rd = bits (insn, 8, 10);
6877 unsigned int imm8 = bits (insn, 0, 7);
6879 if (debug_displaced)
6880 fprintf_unfiltered (gdb_stdlog,
6881 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6882 rd, imm8, insn);
6884 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6887 static int
6888 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6889 uint16_t insn2, struct regcache *regs,
6890 arm_displaced_step_closure *dsc)
6892 unsigned int rd = bits (insn2, 8, 11);
6893 /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
6894 extract the raw immediate fields rather than computing the immediate
6895 value. When generating the ADD or SUB instruction, we can then OR the
6896 immediate straight into the new encoding. */
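/* Concretely, imm_i keeps the "i" bit (bit 10 of the first halfword) and
   imm_3_8 keeps the imm3 (bits 14:12) and imm8 (bits 7:0) fields of the
   second halfword; both are OR'd unchanged into the ADD/SUB (immediate)
   encoding T3 generated below.  */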
6897 unsigned int imm_3_8 = insn2 & 0x70ff;
6898 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6900 if (debug_displaced)
6901 fprintf_unfiltered (gdb_stdlog,
6902 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6903 rd, imm_i, imm_3_8, insn1, insn2);
6905 if (bit (insn1, 7)) /* ADR encoding T2 (the SUB form). */
6907 /* Rewrite as SUB (immediate) encoding T3: SUB Rd, Rd, #imm */
6908 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6909 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6911 else /* ADR encoding T3 (the ADD form). */
6913 /* Rewrite as ADD (immediate) encoding T3: ADD Rd, Rd, #imm */
6914 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6915 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6917 dsc->numinsns = 2;
6919 install_pc_relative (gdbarch, regs, dsc, rd);
6921 return 0;
6924 static int
6925 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
6926 struct regcache *regs,
6927 arm_displaced_step_closure *dsc)
6929 unsigned int rt = bits (insn1, 8, 10);
6930 unsigned int pc;
6931 int imm8 = (bits (insn1, 0, 7) << 2);
6933 /* LDR Rd, [PC, #imm8]
6935 Rewrite as:
6937 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6939 Insn: LDR R0, [R2, R3];
6940 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
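/* For example (made-up addresses): "ldr r5, [pc, #16]" at 0x4000 sees a PC
   value of 0x4004, which is already 4-byte aligned, so R2 is set to 0x4004
   and R3 to 16; the out-of-line "ldr r0, [r2, r3]" then loads from 0x4014,
   and the cleanup moves the loaded value from R0 into R5 and restores R0,
   R2 and R3 from dsc->tmp[].  */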
6942 if (debug_displaced)
6943 fprintf_unfiltered (gdb_stdlog,
6944 "displaced: copying thumb ldr r%d [pc #%d]\n"
6945 , rt, imm8);
6947 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6948 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6949 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6950 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6951 /* The assembler calculates the required value of the offset from the
6952 Align(PC,4) value of this instruction to the label. */
6953 pc = pc & 0xfffffffc;
6955 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
6956 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
6958 dsc->rd = rt;
6959 dsc->u.ldst.xfersize = 4;
6960 dsc->u.ldst.rn = 0;
6961 dsc->u.ldst.immed = 0;
6962 dsc->u.ldst.writeback = 0;
6963 dsc->u.ldst.restore_r4 = 0;
6965 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
6967 dsc->cleanup = &cleanup_load;
6969 return 0;
6972 /* Copy Thumb cbnz/cbz instruction. */
6974 static int
6975 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
6976 struct regcache *regs,
6977 arm_displaced_step_closure *dsc)
6979 int non_zero = bit (insn1, 11);
6980 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
6981 CORE_ADDR from = dsc->insn_addr;
6982 int rn = bits (insn1, 0, 2);
6983 int rn_val = displaced_read_reg (regs, dsc, rn);
6985 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
6986 /* CBNZ and CBZ do not affect the condition flags. If the condition is
6987 true, set it to INST_AL so that cleanup_branch knows the branch is
6988 taken; otherwise leave it as false and cleanup_branch will do nothing. */
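/* A worked example with hypothetical values: for "cbnz r3, <label>" at
   0x2000 with a decoded offset (imm5) of 0x10 and r3 != 0, the condition
   becomes INST_AL and u.branch.dest is set to 0x2014; the instruction
   executed out of line is just a Thumb NOP, and cleanup_branch writes the
   destination into the PC.  */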
6989 if (dsc->u.branch.cond)
6991 dsc->u.branch.cond = INST_AL;
6992 dsc->u.branch.dest = from + 4 + imm5;
6994 else
6995 dsc->u.branch.dest = from + 2;
6997 dsc->u.branch.link = 0;
6998 dsc->u.branch.exchange = 0;
7000 if (debug_displaced)
7001 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7002 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7003 rn, rn_val, insn1, dsc->u.branch.dest);
7005 dsc->modinsn[0] = THUMB_NOP;
7007 dsc->cleanup = &cleanup_branch;
7008 return 0;
7011 /* Copy Table Branch Byte/Halfword */
7012 static int
7013 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7014 uint16_t insn2, struct regcache *regs,
7015 arm_displaced_step_closure *dsc)
7017 ULONGEST rn_val, rm_val;
7018 int is_tbh = bit (insn2, 4);
7019 CORE_ADDR halfwords = 0;
7020 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7022 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7023 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7025 if (is_tbh)
7027 gdb_byte buf[2];
7029 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7030 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7032 else
7034 gdb_byte buf[1];
7036 target_read_memory (rn_val + rm_val, buf, 1);
7037 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7040 if (debug_displaced)
7041 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7042 " table entry 0x%x\n", is_tbh ? "tbh" : "tbb",
7043 (unsigned int) rn_val, (unsigned int) rm_val,
7044 (unsigned int) halfwords);
7046 dsc->u.branch.cond = INST_AL;
7047 dsc->u.branch.link = 0;
7048 dsc->u.branch.exchange = 0;
7049 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7051 dsc->cleanup = &cleanup_branch;
7053 return 0;
7056 static void
7057 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7058 arm_displaced_step_closure *dsc)
7060 /* PC <- r7 */
7061 int val = displaced_read_reg (regs, dsc, 7);
7062 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7064 /* r7 <- r8 */
7065 val = displaced_read_reg (regs, dsc, 8);
7066 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7068 /* r8 <- tmp[0] */
7069 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7073 static int
7074 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7075 struct regcache *regs,
7076 arm_displaced_step_closure *dsc)
7078 dsc->u.block.regmask = insn1 & 0x00ff;
7080 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7081 to :
7083 (1) register list is full, that is, r0-r7 are used.
7084 Prepare: tmp[0] <- r8
7086 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7087 MOV r8, r7; Move value of r7 to r8;
7088 POP {r7}; Load the saved PC value into r7.
7090 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7092 (2) register list is not full, supposing there are N registers in
7093 register list (except PC, 0 <= N <= 7).
7094 Prepare: for each i, 0 - N, tmp[i] <- ri.
7096 POP {r0, r1, ...., rN};
7098 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7099 from tmp[] properly.
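   As a concrete illustration of case (2), with made-up registers: for
   POP {r2, r4, pc}, num_in_list is 2, so r0-r2 are saved to tmp[] and the
   instruction is rewritten as POP {r0, r1, r2} with the PC bit dropped.
   cleanup_block_load_pc then moves r2 into the PC, r1 into r4 and r0 into
   r2, and restores the remaining scratch registers from tmp[].  SP is still
   advanced by 12 by the modified POP itself, so no manual writeback is
   needed.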
7101 if (debug_displaced)
7102 fprintf_unfiltered (gdb_stdlog,
7103 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7104 dsc->u.block.regmask, insn1);
7106 if (dsc->u.block.regmask == 0xff)
7108 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7110 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7111 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7112 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7114 dsc->numinsns = 3;
7115 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7117 else
7119 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7120 unsigned int i;
7121 unsigned int new_regmask;
7123 for (i = 0; i < num_in_list + 1; i++)
7124 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7126 new_regmask = (1 << (num_in_list + 1)) - 1;
7128 if (debug_displaced)
7129 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7130 "{..., pc}: original reg list %.4x,"
7131 " modified list %.4x\n"),
7132 (int) dsc->u.block.regmask, new_regmask);
7134 dsc->u.block.regmask |= 0x8000;
7135 dsc->u.block.writeback = 0;
7136 dsc->u.block.cond = INST_AL;
7138 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7140 dsc->cleanup = &cleanup_block_load_pc;
7143 return 0;
7146 static void
7147 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7148 struct regcache *regs,
7149 arm_displaced_step_closure *dsc)
7151 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7152 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7153 int err = 0;
7155 /* 16-bit thumb instructions. */
7156 switch (op_bit_12_15)
7158 /* Shift (immediate), add, subtract, move and compare. */
7159 case 0: case 1: case 2: case 3:
7160 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7161 "shift/add/sub/mov/cmp",
7162 dsc);
7163 break;
7164 case 4:
7165 switch (op_bit_10_11)
7167 case 0: /* Data-processing */
7168 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7169 "data-processing",
7170 dsc);
7171 break;
7172 case 1: /* Special data instructions and branch and exchange. */
7174 unsigned short op = bits (insn1, 7, 9);
7175 if (op == 6 || op == 7) /* BX or BLX */
7176 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7177 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7178 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7179 else
7180 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7181 dsc);
7183 break;
7184 default: /* LDR (literal) */
7185 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7187 break;
7188 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7189 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7190 break;
7191 case 10:
7192 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7193 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7194 else /* Generate SP-relative address */
7195 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7196 break;
7197 case 11: /* Misc 16-bit instructions */
7199 switch (bits (insn1, 8, 11))
7201 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7202 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7203 break;
7204 case 12: case 13: /* POP */
7205 if (bit (insn1, 8)) /* PC is in register list. */
7206 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7207 else
7208 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7209 break;
7210 case 15: /* If-Then, and hints */
7211 if (bits (insn1, 0, 3))
7212 /* If-Then makes up to four following instructions conditional.
7213 The IT instruction itself is not conditional, so handle it as a
7214 common unmodified instruction. */
7215 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7216 dsc);
7217 else
7218 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7219 break;
7220 default:
7221 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7224 break;
7225 case 12:
7226 if (op_bit_10_11 < 2) /* Store multiple registers */
7227 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7228 else /* Load multiple registers */
7229 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7230 break;
7231 case 13: /* Conditional branch and supervisor call */
7232 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7233 err = thumb_copy_b (gdbarch, insn1, dsc);
7234 else
7235 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7236 break;
7237 case 14: /* Unconditional branch */
7238 err = thumb_copy_b (gdbarch, insn1, dsc);
7239 break;
7240 default:
7241 err = 1;
7244 if (err)
7245 internal_error (__FILE__, __LINE__,
7246 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7249 static int
7250 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7251 uint16_t insn1, uint16_t insn2,
7252 struct regcache *regs,
7253 arm_displaced_step_closure *dsc)
7255 int rt = bits (insn2, 12, 15);
7256 int rn = bits (insn1, 0, 3);
7257 int op1 = bits (insn1, 7, 8);
7259 switch (bits (insn1, 5, 6))
7261 case 0: /* Load byte and memory hints */
7262 if (rt == 0xf) /* PLD/PLI */
7264 if (rn == 0xf)
7265 /* PLD literal or Encoding T3 of PLI (immediate, literal). */
7266 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7267 else
7268 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7269 "pli/pld", dsc);
7271 else
7273 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7274 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7275 1);
7276 else
7277 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7278 "ldrb{reg, immediate}/ldrbt",
7279 dsc);
7282 break;
7283 case 1: /* Load halfword and memory hints. */
7284 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7285 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7286 "pld/unalloc memhint", dsc);
7287 else
7289 if (rn == 0xf)
7290 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7291 2);
7292 else
7293 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7294 "ldrh/ldrht", dsc);
7296 break;
7297 case 2: /* Load word */
7299 int insn2_bit_8_11 = bits (insn2, 8, 11);
7301 if (rn == 0xf)
7302 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7303 else if (op1 == 0x1) /* Encoding T3 */
7304 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7305 0, 1);
7306 else /* op1 == 0x0 */
7308 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7309 /* LDR (immediate) */
7310 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7311 dsc, bit (insn2, 8), 1);
7312 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7313 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7314 "ldrt", dsc);
7315 else
7316 /* LDR (register) */
7317 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7318 dsc, 0, 0);
7320 break;
7322 default:
7323 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7324 break;
7326 return 0;
7329 static void
7330 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7331 uint16_t insn2, struct regcache *regs,
7332 arm_displaced_step_closure *dsc)
7334 int err = 0;
7335 unsigned short op = bit (insn2, 15);
7336 unsigned int op1 = bits (insn1, 11, 12);
7338 switch (op1)
7340 case 1:
7342 switch (bits (insn1, 9, 10))
7344 case 0:
7345 if (bit (insn1, 6))
7347 /* Load/store {dual, exclusive}, table branch. */
7348 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7349 && bits (insn2, 5, 7) == 0)
7350 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7351 dsc);
7352 else
7353 /* PC is not allowed to be used in load/store {dual, exclusive}
7354 instructions. */
7355 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7356 "load/store dual/ex", dsc);
7358 else /* load/store multiple */
7360 switch (bits (insn1, 7, 8))
7362 case 0: case 3: /* SRS, RFE */
7363 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7364 "srs/rfe", dsc);
7365 break;
7366 case 1: case 2: /* LDM/STM/PUSH/POP */
7367 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7368 break;
7371 break;
7373 case 1:
7374 /* Data-processing (shift register). */
7375 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7376 dsc);
7377 break;
7378 default: /* Coprocessor instructions. */
7379 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7380 break;
7382 break;
7384 case 2: /* op1 = 2 */
7385 if (op) /* Branch and misc control. */
7387 if (bit (insn2, 14) /* BLX/BL */
7388 || bit (insn2, 12) /* Unconditional branch */
7389 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7390 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7391 else
7392 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7393 "misc ctrl", dsc);
7395 else
7397 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7399 int dp_op = bits (insn1, 4, 8);
7400 int rn = bits (insn1, 0, 3);
7401 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7402 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7403 regs, dsc);
7404 else
7405 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7406 "dp/pb", dsc);
7408 else /* Data processing (modified immediate) */
7409 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7410 "dp/mi", dsc);
7412 break;
7413 case 3: /* op1 = 3 */
7414 switch (bits (insn1, 9, 10))
7416 case 0:
7417 if (bit (insn1, 4))
7418 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7419 regs, dsc);
7420 else /* NEON Load/Store and Store single data item */
7421 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7422 "neon elt/struct load/store",
7423 dsc);
7424 break;
7425 case 1: /* op1 = 3, bits (9, 10) == 1 */
7426 switch (bits (insn1, 7, 8))
7428 case 0: case 1: /* Data processing (register) */
7429 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7430 "dp(reg)", dsc);
7431 break;
7432 case 2: /* Multiply and absolute difference */
7433 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7434 "mul/mua/diff", dsc);
7435 break;
7436 case 3: /* Long multiply and divide */
7437 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7438 "lmul/lmua", dsc);
7439 break;
7441 break;
7442 default: /* Coprocessor instructions */
7443 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7444 break;
7446 break;
7447 default:
7448 err = 1;
7451 if (err)
7452 internal_error (__FILE__, __LINE__,
7453 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7457 static void
7458 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7459 struct regcache *regs,
7460 arm_displaced_step_closure *dsc)
7462 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7463 uint16_t insn1
7464 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7466 if (debug_displaced)
7467 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7468 "at %.8lx\n", insn1, (unsigned long) from);
7470 dsc->is_thumb = 1;
7471 dsc->insn_size = thumb_insn_size (insn1);
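/* A 32-bit Thumb instruction is one whose first halfword has bits 15:11
   equal to 0b11101, 0b11110 or 0b11111; thumb_insn_size returns 4 for
   those and 2 for everything else.  */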
7472 if (thumb_insn_size (insn1) == 4)
7474 uint16_t insn2
7475 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7476 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7478 else
7479 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7482 void
7483 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7484 CORE_ADDR to, struct regcache *regs,
7485 arm_displaced_step_closure *dsc)
7487 int err = 0;
7488 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7489 uint32_t insn;
7491 /* Most displaced instructions use a 1-instruction scratch space, so set this
7492 here and override below if/when necessary. */
7493 dsc->numinsns = 1;
7494 dsc->insn_addr = from;
7495 dsc->scratch_base = to;
7496 dsc->cleanup = NULL;
7497 dsc->wrote_to_pc = 0;
7499 if (!displaced_in_arm_mode (regs))
7500 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7502 dsc->is_thumb = 0;
7503 dsc->insn_size = 4;
7504 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7505 if (debug_displaced)
7506 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7507 "at %.8lx\n", (unsigned long) insn,
7508 (unsigned long) from);
7510 if ((insn & 0xf0000000) == 0xf0000000)
7511 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
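/* The switch value below packs bits 27:25 of the instruction into bits 3:1
   and bit 4 into bit 0, i.e. (op1 << 1) | op in ARM ARM terms; that is
   enough to select between the data-processing, load/store word/byte,
   media, branch/block-transfer and coprocessor/SVC groups.  */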
7512 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7514 case 0x0: case 0x1: case 0x2: case 0x3:
7515 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7516 break;
7518 case 0x4: case 0x5: case 0x6:
7519 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7520 break;
7522 case 0x7:
7523 err = arm_decode_media (gdbarch, insn, dsc);
7524 break;
7526 case 0x8: case 0x9: case 0xa: case 0xb:
7527 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7528 break;
7530 case 0xc: case 0xd: case 0xe: case 0xf:
7531 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7532 break;
7535 if (err)
7536 internal_error (__FILE__, __LINE__,
7537 _("arm_process_displaced_insn: Instruction decode error"));
7540 /* Actually set up the scratch space for a displaced instruction. */
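/* The resulting layout is simply the modified instruction(s) followed by a
   breakpoint: for a single ARM instruction, the 4-byte modified copy goes
   at TO and the ARM breakpoint at TO + 4, so the inferior traps as soon as
   the copied instruction has been executed.  */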
7542 void
7543 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7544 CORE_ADDR to, arm_displaced_step_closure *dsc)
7546 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7547 unsigned int i, len, offset;
7548 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7549 int size = dsc->is_thumb ? 2 : 4;
7550 const gdb_byte *bkp_insn;
7552 offset = 0;
7553 /* Poke modified instruction(s). */
7554 for (i = 0; i < dsc->numinsns; i++)
7556 if (debug_displaced)
7558 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7559 if (size == 4)
7560 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7561 dsc->modinsn[i]);
7562 else if (size == 2)
7563 fprintf_unfiltered (gdb_stdlog, "%.4x",
7564 (unsigned short)dsc->modinsn[i]);
7566 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7567 (unsigned long) to + offset);
7570 write_memory_unsigned_integer (to + offset, size,
7571 byte_order_for_code,
7572 dsc->modinsn[i]);
7573 offset += size;
7576 /* Choose the correct breakpoint instruction. */
7577 if (dsc->is_thumb)
7579 bkp_insn = tdep->thumb_breakpoint;
7580 len = tdep->thumb_breakpoint_size;
7582 else
7584 bkp_insn = tdep->arm_breakpoint;
7585 len = tdep->arm_breakpoint_size;
7588 /* Put breakpoint afterwards. */
7589 write_memory (to + offset, bkp_insn, len);
7591 if (debug_displaced)
7592 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7593 paddress (gdbarch, from), paddress (gdbarch, to));
7596 /* Entry point for cleaning things up after a displaced instruction has been
7597 single-stepped. */
7599 void
7600 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7601 struct displaced_step_closure *dsc_,
7602 CORE_ADDR from, CORE_ADDR to,
7603 struct regcache *regs)
7605 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7607 if (dsc->cleanup)
7608 dsc->cleanup (gdbarch, regs, dsc);
7610 if (!dsc->wrote_to_pc)
7611 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7612 dsc->insn_addr + dsc->insn_size);
7616 #include "bfd-in2.h"
7617 #include "libcoff.h"
7619 static int
7620 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7622 gdb_disassembler *di
7623 = static_cast<gdb_disassembler *>(info->application_data);
7624 struct gdbarch *gdbarch = di->arch ();
7626 if (arm_pc_is_thumb (gdbarch, memaddr))
7628 static asymbol *asym;
7629 static combined_entry_type ce;
7630 static struct coff_symbol_struct csym;
7631 static struct bfd fake_bfd;
7632 static bfd_target fake_target;
7634 if (csym.native == NULL)
7636 /* Create a fake symbol vector containing a Thumb symbol.
7637 This is solely so that the code in print_insn_little_arm()
7638 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7639 the presence of a Thumb symbol and switch to decoding
7640 Thumb instructions. */
7642 fake_target.flavour = bfd_target_coff_flavour;
7643 fake_bfd.xvec = &fake_target;
7644 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7645 csym.native = &ce;
7646 csym.symbol.the_bfd = &fake_bfd;
7647 csym.symbol.name = "fake";
7648 asym = (asymbol *) & csym;
7651 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7652 info->symbols = &asym;
7654 else
7655 info->symbols = NULL;
7657 /* GDB is able to get bfd_mach from exec_bfd, so info->mach is
7658 accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7659 opcodes/arm-dis.c:print_insn resets info->mach, which triggers
7660 the assert on the mismatch between info->mach and bfd_get_mach
7661 (exec_bfd) in default_print_insn. */
7662 if (exec_bfd != NULL)
7663 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7665 return default_print_insn (memaddr, info);
7668 /* The following define instruction sequences that will cause ARM
7669 CPUs to take an undefined instruction trap. These are used to
7670 signal a breakpoint to GDB.
7672 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7673 modes. A different instruction is required for each mode. The ARM
7674 CPUs can also be big- or little-endian. Thus four different
7675 instructions are needed to support all cases.
7677 Note: ARMv4 defines several new instructions that will take the
7678 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7679 not in fact add the new instructions. The new undefined
7680 instructions in ARMv4 are all instructions that had no defined
7681 behaviour in earlier chips. There is no guarantee that they will
7682 raise an exception; they may be treated as NOPs. In practice, it
7683 may only be safe to rely on instructions matching:
7685 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7686 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7687 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7689 Even this may only be true if the condition predicate is true. The
7690 following use a condition predicate of ALWAYS so it is always TRUE.
7692 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7693 and NetBSD all use a software interrupt rather than an undefined
7694 instruction to force a trap. This can be handled by the
7695 abi-specific code during establishment of the gdbarch vector. */
7697 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7698 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7699 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7700 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7702 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7703 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7704 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7705 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
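/* As a worked example of the pattern above: the little-endian ARM
breakpoint bytes {0xFE,0xDE,0xFF,0xE7} form the word 0xE7FFDEFE, i.e.
cond == 0xE (ALWAYS), bits [27:25] == 011 and bit [4] == 1, so it lies
in the permanently undefined instruction space. The Thumb breakpoint
bytes are both 0xBE, so the halfword 0xBEBE reads the same in either
endianness, which is why the LE and BE variants above are identical;
on ARMv5T and later it is the BKPT #0xBE encoding. */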
7707 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7709 static int
7710 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7712 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7713 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7715 if (arm_pc_is_thumb (gdbarch, *pcptr))
7717 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7719 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7720 check whether we are replacing a 32-bit instruction. */
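/* (A halfword whose bits [15:11] are 0b11101, 0b11110 or 0b11111 is
the first half of a 32-bit Thumb-2 instruction; that is the property
thumb_insn_size tests for.) */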
7721 if (tdep->thumb2_breakpoint != NULL)
7723 gdb_byte buf[2];
7725 if (target_read_memory (*pcptr, buf, 2) == 0)
7727 unsigned short inst1;
7729 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7730 if (thumb_insn_size (inst1) == 4)
7731 return ARM_BP_KIND_THUMB2;
7735 return ARM_BP_KIND_THUMB;
7737 else
7738 return ARM_BP_KIND_ARM;
7742 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7744 static const gdb_byte *
7745 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7747 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7749 switch (kind)
7751 case ARM_BP_KIND_ARM:
7752 *size = tdep->arm_breakpoint_size;
7753 return tdep->arm_breakpoint;
7754 case ARM_BP_KIND_THUMB:
7755 *size = tdep->thumb_breakpoint_size;
7756 return tdep->thumb_breakpoint;
7757 case ARM_BP_KIND_THUMB2:
7758 *size = tdep->thumb2_breakpoint_size;
7759 return tdep->thumb2_breakpoint;
7760 default:
7761 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7765 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7767 static int
7768 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7769 struct regcache *regcache,
7770 CORE_ADDR *pcptr)
7772 gdb_byte buf[4];
7774 /* Check that the memory pointed to by PC is readable. */
7775 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7777 struct arm_get_next_pcs next_pcs_ctx;
7779 arm_get_next_pcs_ctor (&next_pcs_ctx,
7780 &arm_get_next_pcs_ops,
7781 gdbarch_byte_order (gdbarch),
7782 gdbarch_byte_order_for_code (gdbarch),
7784 regcache);
7786 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7788 /* If *PCPTR is one of the addresses the software single-step
7789 computation says could be executed next, determine the Thumb
7790 mode from that destination address. */
7791 for (CORE_ADDR pc : next_pcs)
7793 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7795 if (IS_THUMB_ADDR (pc))
7797 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7798 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7800 else
7801 return ARM_BP_KIND_ARM;
7806 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7809 /* Extract from an array REGBUF containing the (raw) register state a
7810 function return value of type TYPE, and copy that, in virtual
7811 format, into VALBUF. */
7813 static void
7814 arm_extract_return_value (struct type *type, struct regcache *regs,
7815 gdb_byte *valbuf)
7817 struct gdbarch *gdbarch = regs->arch ();
7818 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7820 if (TYPE_CODE_FLT == TYPE_CODE (type))
7822 switch (gdbarch_tdep (gdbarch)->fp_model)
7824 case ARM_FLOAT_FPA:
7826 /* The value is in register F0 in internal format. We need to
7827 extract the raw value and then convert it to the desired
7828 internal type. */
7829 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7831 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7832 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7833 valbuf, type);
7835 break;
7837 case ARM_FLOAT_SOFT_FPA:
7838 case ARM_FLOAT_SOFT_VFP:
7839 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7840 not using the VFP ABI code. */
7841 case ARM_FLOAT_VFP:
7842 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7843 if (TYPE_LENGTH (type) > 4)
7844 regs->cooked_read (ARM_A1_REGNUM + 1, valbuf + INT_REGISTER_SIZE);
7845 break;
7847 default:
7848 internal_error (__FILE__, __LINE__,
7849 _("arm_extract_return_value: "
7850 "Floating point model not supported"));
7851 break;
7854 else if (TYPE_CODE (type) == TYPE_CODE_INT
7855 || TYPE_CODE (type) == TYPE_CODE_CHAR
7856 || TYPE_CODE (type) == TYPE_CODE_BOOL
7857 || TYPE_CODE (type) == TYPE_CODE_PTR
7858 || TYPE_IS_REFERENCE (type)
7859 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7861 /* If the type is a plain integer, then the access is
7862 straightforward. Otherwise we have to play around a bit
7863 more. */
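/* For example, a 64-bit integer result occupies r0 and r1; the loop
below copies r0 into the first word of VALBUF and r1 into the second,
which (for the standard ABIs) matches the value's in-memory layout in
the target byte order. */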
7864 int len = TYPE_LENGTH (type);
7865 int regno = ARM_A1_REGNUM;
7866 ULONGEST tmp;
7868 while (len > 0)
7870 /* By using store_unsigned_integer we avoid having to do
7871 anything special for small big-endian values. */
7872 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7873 store_unsigned_integer (valbuf,
7874 (len > INT_REGISTER_SIZE
7875 ? INT_REGISTER_SIZE : len),
7876 byte_order, tmp);
7877 len -= INT_REGISTER_SIZE;
7878 valbuf += INT_REGISTER_SIZE;
7881 else
7883 /* For a structure or union the behaviour is as if the value had
7884 been stored to word-aligned memory and then loaded into
7885 registers with 32-bit load instruction(s). */
7886 int len = TYPE_LENGTH (type);
7887 int regno = ARM_A1_REGNUM;
7888 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7890 while (len > 0)
7892 regs->cooked_read (regno++, tmpbuf);
7893 memcpy (valbuf, tmpbuf,
7894 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7895 len -= INT_REGISTER_SIZE;
7896 valbuf += INT_REGISTER_SIZE;
7902 /* Will a function return an aggregate type in memory or in a
7903 register? Return 0 if an aggregate type can be returned in a
7904 register, 1 if it must be returned in memory. */
7906 static int
7907 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7909 enum type_code code;
7911 type = check_typedef (type);
7913 /* Simple, non-aggregate types (i.e. not including vectors and
7914 complex) are always returned in a register (or registers). */
7915 code = TYPE_CODE (type);
7916 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7917 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7918 return 0;
7920 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7922 /* Vector values should be returned using ARM registers if they
7923 are not over 16 bytes. */
7924 return (TYPE_LENGTH (type) > 16);
7927 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7929 /* The AAPCS says all aggregates not larger than a word are returned
7930 in a register. */
7931 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
7932 return 0;
7934 return 1;
7936 else
7938 int nRc;
7940 /* All aggregate types that won't fit in a register must be returned
7941 in memory. */
7942 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
7943 return 1;
7945 /* In the ARM ABI, "integer-like" aggregate types are returned in
7946 registers. For an aggregate type to be integer-like, its size
7947 must be less than or equal to INT_REGISTER_SIZE and the
7948 offset of each addressable subfield must be zero. Note that bit
7949 fields are not addressable, and all addressable subfields of
7950 unions always start at offset zero.
7952 This function is based on the behaviour of GCC 2.95.1.
7953 See: gcc/arm.c: arm_return_in_memory() for details.
7955 Note: All versions of GCC before GCC 2.95.2 do not set up the
7956 parameters correctly for a function returning the following
7957 structure: struct { float f;}; This should be returned in memory,
7958 not a register. Richard Earnshaw sent me a patch, but I do not
7959 know of any way to detect if a function like the above has been
7960 compiled with the correct calling convention. */
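/* For example, under this check struct { int i; } and struct { char c; }
are integer-like and are returned in r0, while struct { float f; }
(contains a floating-point field) and struct { short a; short b; }
(second field at a non-zero offset) are flagged for return in memory. */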
7962 /* Assume all other aggregate types can be returned in a register.
7963 Run a check for structures, unions and arrays. */
7964 nRc = 0;
7966 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7968 int i;
7969 /* Need to check if this struct/union is "integer-like". For
7970 this to be true, its size must be less than or equal to
7971 INT_REGISTER_SIZE and the offset of each addressable
7972 subfield must be zero. Note that bit fields are not
7973 addressable, and unions always start at offset zero. If any
7974 of the subfields is a floating point type, the struct/union
7975 cannot be an integer type. */
7977 /* For each field in the object, check:
7978 1) Is it FP? --> yes, nRc = 1;
7979 2) Is it addressable (bitpos != 0) and
7980 not packed (bitsize == 0)?
7981 --> yes, nRc = 1
7984 for (i = 0; i < TYPE_NFIELDS (type); i++)
7986 enum type_code field_type_code;
7988 field_type_code
7989 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7990 i)));
7992 /* Is it a floating point type field? */
7993 if (field_type_code == TYPE_CODE_FLT)
7995 nRc = 1;
7996 break;
7999 /* If bitpos != 0, then we have to care about it. */
8000 if (TYPE_FIELD_BITPOS (type, i) != 0)
8002 /* Bitfields are not addressable. If the field bitsize is
8003 zero, then the field is not packed. Hence it cannot be
8004 a bitfield or any other packed type. */
8005 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8007 nRc = 1;
8008 break;
8014 return nRc;
8018 /* Write into appropriate registers a function return value of type
8019 TYPE, given in virtual format. */
8021 static void
8022 arm_store_return_value (struct type *type, struct regcache *regs,
8023 const gdb_byte *valbuf)
8025 struct gdbarch *gdbarch = regs->arch ();
8026 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8028 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8030 gdb_byte buf[FP_REGISTER_SIZE];
8032 switch (gdbarch_tdep (gdbarch)->fp_model)
8034 case ARM_FLOAT_FPA:
8036 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8037 regs->cooked_write (ARM_F0_REGNUM, buf);
8038 break;
8040 case ARM_FLOAT_SOFT_FPA:
8041 case ARM_FLOAT_SOFT_VFP:
8042 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8043 not using the VFP ABI code. */
8044 case ARM_FLOAT_VFP:
8045 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8046 if (TYPE_LENGTH (type) > 4)
8047 regs->cooked_write (ARM_A1_REGNUM + 1, valbuf + INT_REGISTER_SIZE);
8048 break;
8050 default:
8051 internal_error (__FILE__, __LINE__,
8052 _("arm_store_return_value: Floating "
8053 "point model not supported"));
8054 break;
8057 else if (TYPE_CODE (type) == TYPE_CODE_INT
8058 || TYPE_CODE (type) == TYPE_CODE_CHAR
8059 || TYPE_CODE (type) == TYPE_CODE_BOOL
8060 || TYPE_CODE (type) == TYPE_CODE_PTR
8061 || TYPE_IS_REFERENCE (type)
8062 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8064 if (TYPE_LENGTH (type) <= 4)
8066 /* Values of one word or less are zero/sign-extended and
8067 returned in r0. */
8068 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8069 LONGEST val = unpack_long (type, valbuf);
8071 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8072 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8074 else
8076 /* Integral values greater than one word are stored in consecutive
8077 registers starting with r0. This will always be a multiple of
8078 the register size. */
8079 int len = TYPE_LENGTH (type);
8080 int regno = ARM_A1_REGNUM;
8082 while (len > 0)
8084 regs->cooked_write (regno++, valbuf);
8085 len -= INT_REGISTER_SIZE;
8086 valbuf += INT_REGISTER_SIZE;
8090 else
8092 /* For a structure or union the behaviour is as if the value had
8093 been stored to word-aligned memory and then loaded into
8094 registers with 32-bit load instruction(s). */
8095 int len = TYPE_LENGTH (type);
8096 int regno = ARM_A1_REGNUM;
8097 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8099 while (len > 0)
8101 memcpy (tmpbuf, valbuf,
8102 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8103 regs->cooked_write (regno++, tmpbuf);
8104 len -= INT_REGISTER_SIZE;
8105 valbuf += INT_REGISTER_SIZE;
8111 /* Handle function return values. */
8113 static enum return_value_convention
8114 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8115 struct type *valtype, struct regcache *regcache,
8116 gdb_byte *readbuf, const gdb_byte *writebuf)
8118 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8119 struct type *func_type = function ? value_type (function) : NULL;
8120 enum arm_vfp_cprc_base_type vfp_base_type;
8121 int vfp_base_count;
8123 if (arm_vfp_abi_for_function (gdbarch, func_type)
8124 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8126 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8127 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8128 int i;
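/* Homogeneous VFP aggregates come back in consecutive VFP registers.
For instance, under the VFP variant of the AAPCS a struct of four
floats has base type 's' and count 4 and is transferred through s0-s3
by the loop below, while a struct of two doubles uses d0 and d1. */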
8129 for (i = 0; i < vfp_base_count; i++)
8131 if (reg_char == 'q')
8133 if (writebuf)
8134 arm_neon_quad_write (gdbarch, regcache, i,
8135 writebuf + i * unit_length);
8137 if (readbuf)
8138 arm_neon_quad_read (gdbarch, regcache, i,
8139 readbuf + i * unit_length);
8141 else
8143 char name_buf[4];
8144 int regnum;
8146 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8147 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8148 strlen (name_buf));
8149 if (writebuf)
8150 regcache->cooked_write (regnum, writebuf + i * unit_length);
8151 if (readbuf)
8152 regcache->cooked_read (regnum, readbuf + i * unit_length);
8155 return RETURN_VALUE_REGISTER_CONVENTION;
8158 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8159 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8160 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8162 if (tdep->struct_return == pcc_struct_return
8163 || arm_return_in_memory (gdbarch, valtype))
8164 return RETURN_VALUE_STRUCT_CONVENTION;
8166 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8168 if (arm_return_in_memory (gdbarch, valtype))
8169 return RETURN_VALUE_STRUCT_CONVENTION;
8172 if (writebuf)
8173 arm_store_return_value (valtype, regcache, writebuf);
8175 if (readbuf)
8176 arm_extract_return_value (valtype, regcache, readbuf);
8178 return RETURN_VALUE_REGISTER_CONVENTION;
8182 static int
8183 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8185 struct gdbarch *gdbarch = get_frame_arch (frame);
8186 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8187 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8188 CORE_ADDR jb_addr;
8189 gdb_byte buf[INT_REGISTER_SIZE];
8191 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8193 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8194 INT_REGISTER_SIZE))
8195 return 0;
8197 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8198 return 1;
8201 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8202 return the target PC. Otherwise return 0. */
8204 CORE_ADDR
8205 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8207 const char *name;
8208 int namelen;
8209 CORE_ADDR start_addr;
8211 /* Find the starting address and name of the function containing the PC. */
8212 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8214 /* Trampoline 'bx reg' doesn't belong to any function. Do the
8215 check here. */
8216 start_addr = arm_skip_bx_reg (frame, pc);
8217 if (start_addr != 0)
8218 return start_addr;
8220 return 0;
8223 /* If PC is in a Thumb call or return stub, return the address of the
8224 target PC, which is in a register. The thunk functions are called
8225 _call_via_xx, where xx is the register name. The possible names
8226 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8227 functions, named __ARM_call_via_r[0-7]. */
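/* For example, "_call_via_r3" typically consists of a single "bx r3",
so the real target is whatever r3 held on entry to the stub; the
table lookup below simply reads that register from the frame. */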
8228 if (startswith (name, "_call_via_")
8229 || startswith (name, "__ARM_call_via_"))
8231 /* Use the name suffix to determine which register contains the
8232 target PC. */
8233 static const char *table[15] =
8234 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8235 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8237 int regno;
8238 int offset = strlen (name) - 2;
8240 for (regno = 0; regno <= 14; regno++)
8241 if (strcmp (&name[offset], table[regno]) == 0)
8242 return get_frame_register_unsigned (frame, regno);
8245 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8246 non-interworking calls to foo. We could decode the stubs
8247 to find the target but it's easier to use the symbol table. */
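/* For example, for "__foo_from_thumb" (16 characters), target_len ends
up as 16 - 2 - strlen ("_from_thumb") = 3, and copying three bytes
starting at name + 2 yields "foo", which is then looked up as a
minimal symbol below. */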
8248 namelen = strlen (name);
8249 if (name[0] == '_' && name[1] == '_'
8250 && ((namelen > 2 + strlen ("_from_thumb")
8251 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8252 || (namelen > 2 + strlen ("_from_arm")
8253 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8255 char *target_name;
8256 int target_len = namelen - 2;
8257 struct bound_minimal_symbol minsym;
8258 struct objfile *objfile;
8259 struct obj_section *sec;
8261 if (name[namelen - 1] == 'b')
8262 target_len -= strlen ("_from_thumb");
8263 else
8264 target_len -= strlen ("_from_arm");
8266 target_name = (char *) alloca (target_len + 1);
8267 memcpy (target_name, name + 2, target_len);
8268 target_name[target_len] = '\0';
8270 sec = find_pc_section (pc);
8271 objfile = (sec == NULL) ? NULL : sec->objfile;
8272 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8273 if (minsym.minsym != NULL)
8274 return BMSYMBOL_VALUE_ADDRESS (minsym);
8275 else
8276 return 0;
8279 return 0; /* not a stub */
8282 static void
8283 set_arm_command (const char *args, int from_tty)
8285 printf_unfiltered (_("\
8286 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8287 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8290 static void
8291 show_arm_command (const char *args, int from_tty)
8293 cmd_show_list (showarmcmdlist, from_tty, "");
8296 static void
8297 arm_update_current_architecture (void)
8299 struct gdbarch_info info;
8301 /* If the current architecture is not ARM, we have nothing to do. */
8302 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8303 return;
8305 /* Update the architecture. */
8306 gdbarch_info_init (&info);
8308 if (!gdbarch_update_p (info))
8309 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8312 static void
8313 set_fp_model_sfunc (const char *args, int from_tty,
8314 struct cmd_list_element *c)
8316 int fp_model;
8318 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8319 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8321 arm_fp_model = (enum arm_float_model) fp_model;
8322 break;
8325 if (fp_model == ARM_FLOAT_LAST)
8326 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8327 current_fp_model);
8329 arm_update_current_architecture ();
8332 static void
8333 show_fp_model (struct ui_file *file, int from_tty,
8334 struct cmd_list_element *c, const char *value)
8336 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8338 if (arm_fp_model == ARM_FLOAT_AUTO
8339 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8340 fprintf_filtered (file, _("\
8341 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8342 fp_model_strings[tdep->fp_model]);
8343 else
8344 fprintf_filtered (file, _("\
8345 The current ARM floating point model is \"%s\".\n"),
8346 fp_model_strings[arm_fp_model]);
8349 static void
8350 arm_set_abi (const char *args, int from_tty,
8351 struct cmd_list_element *c)
8353 int arm_abi;
8355 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8356 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8358 arm_abi_global = (enum arm_abi_kind) arm_abi;
8359 break;
8362 if (arm_abi == ARM_ABI_LAST)
8363 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8364 arm_abi_string);
8366 arm_update_current_architecture ();
8369 static void
8370 arm_show_abi (struct ui_file *file, int from_tty,
8371 struct cmd_list_element *c, const char *value)
8373 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8375 if (arm_abi_global == ARM_ABI_AUTO
8376 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8377 fprintf_filtered (file, _("\
8378 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8379 arm_abi_strings[tdep->arm_abi]);
8380 else
8381 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8382 arm_abi_string);
8385 static void
8386 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8387 struct cmd_list_element *c, const char *value)
8389 fprintf_filtered (file,
8390 _("The current execution mode assumed "
8391 "(when symbols are unavailable) is \"%s\".\n"),
8392 arm_fallback_mode_string);
8395 static void
8396 arm_show_force_mode (struct ui_file *file, int from_tty,
8397 struct cmd_list_element *c, const char *value)
8399 fprintf_filtered (file,
8400 _("The current execution mode assumed "
8401 "(even when symbols are available) is \"%s\".\n"),
8402 arm_force_mode_string);
8405 /* If the user changes the register disassembly style used for info
8406 register and other commands, we have to also switch the style used
8407 in opcodes for disassembly output. This function is run by the "set
8408 arm disassembly" command, and does that. */
8410 static void
8411 set_disassembly_style_sfunc (const char *args, int from_tty,
8412 struct cmd_list_element *c)
8414 /* Convert the short style name into the long style name (e.g., reg-names-*)
8415 before calling the generic set_disassembler_options() function. */
8416 std::string long_name = std::string ("reg-names-") + disassembly_style;
8417 set_disassembler_options (&long_name[0]);
8420 static void
8421 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8422 struct cmd_list_element *c, const char *value)
8424 struct gdbarch *gdbarch = get_current_arch ();
8425 char *options = get_disassembler_options (gdbarch);
8426 const char *style = "";
8427 int len = 0;
8428 const char *opt;
8430 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8431 if (CONST_STRNEQ (opt, "reg-names-"))
8433 style = &opt[strlen ("reg-names-")];
8434 len = strcspn (style, ",");
8437 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8440 /* Return the ARM register name corresponding to register I. */
8441 static const char *
8442 arm_register_name (struct gdbarch *gdbarch, int i)
8444 const int num_regs = gdbarch_num_regs (gdbarch);
8446 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8447 && i >= num_regs && i < num_regs + 32)
8449 static const char *const vfp_pseudo_names[] = {
8450 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8451 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8452 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8453 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8456 return vfp_pseudo_names[i - num_regs];
8459 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8460 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8462 static const char *const neon_pseudo_names[] = {
8463 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8464 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8467 return neon_pseudo_names[i - num_regs - 32];
8470 if (i >= ARRAY_SIZE (arm_register_names))
8471 /* These registers are only supported on targets which supply
8472 an XML description. */
8473 return "";
8475 return arm_register_names[i];
8478 /* Test whether the coff symbol specific value corresponds to a Thumb
8479 function. */
8481 static int
8482 coff_sym_is_thumb (int val)
8484 return (val == C_THUMBEXT
8485 || val == C_THUMBSTAT
8486 || val == C_THUMBEXTFUNC
8487 || val == C_THUMBSTATFUNC
8488 || val == C_THUMBLABEL);
8491 /* arm_coff_make_msymbol_special()
8492 arm_elf_make_msymbol_special()
8494 These functions test whether the COFF or ELF symbol corresponds to
8495 an address in thumb code, and set a "special" bit in a minimal
8496 symbol to indicate that it does. */
8498 static void
8499 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8501 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8503 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8504 == ST_BRANCH_TO_THUMB)
8505 MSYMBOL_SET_SPECIAL (msym);
8508 static void
8509 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8511 if (coff_sym_is_thumb (val))
8512 MSYMBOL_SET_SPECIAL (msym);
8515 static void
8516 arm_objfile_data_free (struct objfile *objfile, void *arg)
8518 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8519 unsigned int i;
8521 for (i = 0; i < objfile->obfd->section_count; i++)
8522 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8525 static void
8526 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8527 asymbol *sym)
8529 const char *name = bfd_asymbol_name (sym);
8530 struct arm_per_objfile *data;
8531 VEC(arm_mapping_symbol_s) **map_p;
8532 struct arm_mapping_symbol new_map_sym;
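/* Per the ARM ELF mapping-symbol convention, "$a" marks the start of a
run of ARM code, "$t" the start of Thumb code and "$d" the start of
literal data; only these three are recorded here. */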
8534 gdb_assert (name[0] == '$');
8535 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8536 return;
8538 data = (struct arm_per_objfile *) objfile_data (objfile,
8539 arm_objfile_data_key);
8540 if (data == NULL)
8542 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8543 struct arm_per_objfile);
8544 set_objfile_data (objfile, arm_objfile_data_key, data);
8545 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8546 objfile->obfd->section_count,
8547 VEC(arm_mapping_symbol_s) *);
8549 map_p = &data->section_maps[bfd_get_section (sym)->index];
8551 new_map_sym.value = sym->value;
8552 new_map_sym.type = name[1];
8554 /* Assume that most mapping symbols appear in order of increasing
8555 value. If they were randomly distributed, it would be faster to
8556 always push here and then sort at first use. */
8557 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8559 struct arm_mapping_symbol *prev_map_sym;
8561 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8562 if (prev_map_sym->value >= sym->value)
8564 unsigned int idx;
8565 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8566 arm_compare_mapping_symbols);
8567 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8568 return;
8572 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8575 static void
8576 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8578 struct gdbarch *gdbarch = regcache->arch ();
8579 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8581 /* If necessary, set the T bit. */
8582 if (arm_apcs_32)
8584 ULONGEST val, t_bit;
8585 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8586 t_bit = arm_psr_thumb_bit (gdbarch);
8587 if (arm_pc_is_thumb (gdbarch, pc))
8588 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8589 val | t_bit);
8590 else
8591 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8592 val & ~t_bit);
8596 /* Read the contents of a NEON quad register, by reading from two
8597 double registers. This is used to implement the quad pseudo
8598 registers, and for argument passing in case the quad registers are
8599 missing; vectors are passed in quad registers when using the VFP
8600 ABI, even if a NEON unit is not present. REGNUM is the index of
8601 the quad register, in [0, 15]. */
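/* For example, q1 is the concatenation of d2 (least significant half)
and d3 (most significant half). On a big-endian target the more
significant double must come first in the 16-byte buffer, which is
why the code below places the first (less significant) double at
offset 8 in that case. */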
8603 static enum register_status
8604 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8605 int regnum, gdb_byte *buf)
8607 char name_buf[4];
8608 gdb_byte reg_buf[8];
8609 int offset, double_regnum;
8610 enum register_status status;
8612 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8613 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8614 strlen (name_buf));
8616 /* d0 is always the least significant half of q0. */
8617 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8618 offset = 8;
8619 else
8620 offset = 0;
8622 status = regcache->raw_read (double_regnum, reg_buf);
8623 if (status != REG_VALID)
8624 return status;
8625 memcpy (buf + offset, reg_buf, 8);
8627 offset = 8 - offset;
8628 status = regcache->raw_read (double_regnum + 1, reg_buf);
8629 if (status != REG_VALID)
8630 return status;
8631 memcpy (buf + offset, reg_buf, 8);
8633 return REG_VALID;
8636 static enum register_status
8637 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8638 int regnum, gdb_byte *buf)
8640 const int num_regs = gdbarch_num_regs (gdbarch);
8641 char name_buf[4];
8642 gdb_byte reg_buf[8];
8643 int offset, double_regnum;
8645 gdb_assert (regnum >= num_regs);
8646 regnum -= num_regs;
8648 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8649 /* Quad-precision register. */
8650 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8651 else
8653 enum register_status status;
8655 /* Single-precision register. */
8656 gdb_assert (regnum < 32);
8658 /* s0 is always the least significant half of d0. */
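/* E.g. s5 aliases the most significant half of d2, so its four bytes
sit at offset 4 within the raw d2 contents on a little-endian target
and at offset 0 on a big-endian target. */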
8659 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8660 offset = (regnum & 1) ? 0 : 4;
8661 else
8662 offset = (regnum & 1) ? 4 : 0;
8664 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8665 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8666 strlen (name_buf));
8668 status = regcache->raw_read (double_regnum, reg_buf);
8669 if (status == REG_VALID)
8670 memcpy (buf, reg_buf + offset, 4);
8671 return status;
8675 /* Store the contents of BUF to a NEON quad register, by writing to
8676 two double registers. This is used to implement the quad pseudo
8677 registers, and for argument passing in case the quad registers are
8678 missing; vectors are passed in quad registers when using the VFP
8679 ABI, even if a NEON unit is not present. REGNUM is the index
8680 of the quad register, in [0, 15]. */
8682 static void
8683 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8684 int regnum, const gdb_byte *buf)
8686 char name_buf[4];
8687 int offset, double_regnum;
8689 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8690 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8691 strlen (name_buf));
8693 /* d0 is always the least significant half of q0. */
8694 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8695 offset = 8;
8696 else
8697 offset = 0;
8699 regcache->raw_write (double_regnum, buf + offset);
8700 offset = 8 - offset;
8701 regcache->raw_write (double_regnum + 1, buf + offset);
8704 static void
8705 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8706 int regnum, const gdb_byte *buf)
8708 const int num_regs = gdbarch_num_regs (gdbarch);
8709 char name_buf[4];
8710 gdb_byte reg_buf[8];
8711 int offset, double_regnum;
8713 gdb_assert (regnum >= num_regs);
8714 regnum -= num_regs;
8716 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8717 /* Quad-precision register. */
8718 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8719 else
8721 /* Single-precision register. */
8722 gdb_assert (regnum < 32);
8724 /* s0 is always the least significant half of d0. */
8725 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8726 offset = (regnum & 1) ? 0 : 4;
8727 else
8728 offset = (regnum & 1) ? 4 : 0;
8730 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8731 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8732 strlen (name_buf));
8734 regcache->raw_read (double_regnum, reg_buf);
8735 memcpy (reg_buf + offset, buf, 4);
8736 regcache->raw_write (double_regnum, reg_buf);
8740 static struct value *
8741 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8743 const int *reg_p = (const int *) baton;
8744 return value_of_register (*reg_p, frame);
8747 static enum gdb_osabi
8748 arm_elf_osabi_sniffer (bfd *abfd)
8750 unsigned int elfosabi;
8751 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8753 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8755 if (elfosabi == ELFOSABI_ARM)
8756 /* GNU tools use this value. Check note sections in this case,
8757 as well. */
8758 bfd_map_over_sections (abfd,
8759 generic_elf_osabi_sniff_abi_tag_sections,
8760 &osabi);
8762 /* Anything else will be handled by the generic ELF sniffer. */
8763 return osabi;
8766 static int
8767 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8768 struct reggroup *group)
8770 /* The FPS register's type is INT, but it belongs to float_reggroup. Besides
8771 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8772 all_reggroup, of course. */
8773 if (regnum == ARM_FPS_REGNUM)
8774 return (group == float_reggroup
8775 || group == save_reggroup
8776 || group == restore_reggroup
8777 || group == all_reggroup);
8778 else
8779 return default_register_reggroup_p (gdbarch, regnum, group);
8783 /* For backward-compatibility we allow two 'g' packet lengths with
8784 the remote protocol depending on whether FPA registers are
8785 supplied. M-profile targets do not have FPA registers, but some
8786 stubs already exist in the wild which use a 'g' packet which
8787 supplies them, albeit with dummy values. The packet format which
8788 includes FPA registers should be considered deprecated for
8789 M-profile targets. */
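/* Assuming the usual register sizes (4-byte core registers, 12-byte FPA
registers, 8-byte VFP double registers), the three guesses registered
below amount to 168, 68 and 200 bytes of 'g' packet payload
respectively. */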
8791 static void
8792 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8794 if (gdbarch_tdep (gdbarch)->is_m)
8796 /* If we know from the executable this is an M-profile target,
8797 cater for remote targets whose register set layout is the
8798 same as the FPA layout. */
8799 register_remote_g_packet_guess (gdbarch,
8800 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8801 (16 * INT_REGISTER_SIZE)
8802 + (8 * FP_REGISTER_SIZE)
8803 + (2 * INT_REGISTER_SIZE),
8804 tdesc_arm_with_m_fpa_layout);
8806 /* The regular M-profile layout. */
8807 register_remote_g_packet_guess (gdbarch,
8808 /* r0-r12,sp,lr,pc; xpsr */
8809 (16 * INT_REGISTER_SIZE)
8810 + INT_REGISTER_SIZE,
8811 tdesc_arm_with_m);
8813 /* M-profile plus M4F VFP. */
8814 register_remote_g_packet_guess (gdbarch,
8815 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8816 (16 * INT_REGISTER_SIZE)
8817 + (16 * VFP_REGISTER_SIZE)
8818 + (2 * INT_REGISTER_SIZE),
8819 tdesc_arm_with_m_vfp_d16);
8822 /* Otherwise we don't have a useful guess. */
8825 /* Implement the code_of_frame_writable gdbarch method. */
8827 static int
8828 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8830 if (gdbarch_tdep (gdbarch)->is_m
8831 && get_frame_type (frame) == SIGTRAMP_FRAME)
8833 /* M-profile exception frames return to some magic PCs, which
8834 aren't writable at all. */
8835 return 0;
8837 else
8838 return 1;
8842 /* Initialize the current architecture based on INFO. If possible,
8843 re-use an architecture from ARCHES, which is a list of
8844 architectures already created during this debugging session.
8846 Called e.g. at program startup, when reading a core file, and when
8847 reading a binary file. */
8849 static struct gdbarch *
8850 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8852 struct gdbarch_tdep *tdep;
8853 struct gdbarch *gdbarch;
8854 struct gdbarch_list *best_arch;
8855 enum arm_abi_kind arm_abi = arm_abi_global;
8856 enum arm_float_model fp_model = arm_fp_model;
8857 struct tdesc_arch_data *tdesc_data = NULL;
8858 int i, is_m = 0;
8859 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8860 int have_wmmx_registers = 0;
8861 int have_neon = 0;
8862 int have_fpa_registers = 1;
8863 const struct target_desc *tdesc = info.target_desc;
8865 /* If we have an object to base this architecture on, try to determine
8866 its ABI. */
8868 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8870 int ei_osabi, e_flags;
8872 switch (bfd_get_flavour (info.abfd))
8874 case bfd_target_coff_flavour:
8875 /* Assume it's an old APCS-style ABI. */
8876 /* XXX WinCE? */
8877 arm_abi = ARM_ABI_APCS;
8878 break;
8880 case bfd_target_elf_flavour:
8881 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8882 e_flags = elf_elfheader (info.abfd)->e_flags;
8884 if (ei_osabi == ELFOSABI_ARM)
8886 /* GNU tools used to use this value, but do not for EABI
8887 objects. There's nowhere to tag an EABI version
8888 anyway, so assume APCS. */
8889 arm_abi = ARM_ABI_APCS;
8891 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8893 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8895 switch (eabi_ver)
8897 case EF_ARM_EABI_UNKNOWN:
8898 /* Assume GNU tools. */
8899 arm_abi = ARM_ABI_APCS;
8900 break;
8902 case EF_ARM_EABI_VER4:
8903 case EF_ARM_EABI_VER5:
8904 arm_abi = ARM_ABI_AAPCS;
8905 /* EABI binaries default to VFP float ordering.
8906 They may also contain build attributes that can
8907 be used to identify if the VFP argument-passing
8908 ABI is in use. */
8909 if (fp_model == ARM_FLOAT_AUTO)
8911 #ifdef HAVE_ELF
8912 switch (bfd_elf_get_obj_attr_int (info.abfd,
8913 OBJ_ATTR_PROC,
8914 Tag_ABI_VFP_args))
8916 case AEABI_VFP_args_base:
8917 /* "The user intended FP parameter/result
8918 passing to conform to AAPCS, base
8919 variant". */
8920 fp_model = ARM_FLOAT_SOFT_VFP;
8921 break;
8922 case AEABI_VFP_args_vfp:
8923 /* "The user intended FP parameter/result
8924 passing to conform to AAPCS, VFP
8925 variant". */
8926 fp_model = ARM_FLOAT_VFP;
8927 break;
8928 case AEABI_VFP_args_toolchain:
8929 /* "The user intended FP parameter/result
8930 passing to conform to tool chain-specific
8931 conventions" - we don't know any such
8932 conventions, so leave it as "auto". */
8933 break;
8934 case AEABI_VFP_args_compatible:
8935 /* "Code is compatible with both the base
8936 and VFP variants; the user did not permit
8937 non-variadic functions to pass FP
8938 parameters/results" - leave it as
8939 "auto". */
8940 break;
8941 default:
8942 /* Attribute value not mentioned in the
8943 November 2012 ABI, so leave it as
8944 "auto". */
8945 break;
8947 #else
8948 fp_model = ARM_FLOAT_SOFT_VFP;
8949 #endif
8951 break;
8953 default:
8954 /* Leave it as "auto". */
8955 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8956 break;
8959 #ifdef HAVE_ELF
8960 /* Detect M-profile programs. This only works if the
8961 executable file includes build attributes; GCC does
8962 copy them to the executable, but e.g. RealView does
8963 not. */
8964 int attr_arch
8965 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8966 Tag_CPU_arch);
8967 int attr_profile
8968 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8969 Tag_CPU_arch_profile);
8971 /* GCC specifies the profile for v6-M; RealView only
8972 specifies the profile for architectures starting with
8973 V7 (as opposed to architectures with a tag
8974 numerically greater than TAG_CPU_ARCH_V7). */
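/* Tag_CPU_arch_profile uses the ASCII letters 'A', 'R' and 'M' for the
application, real-time and microcontroller profiles, hence the
comparison against 'M' below. */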
8975 if (!tdesc_has_registers (tdesc)
8976 && (attr_arch == TAG_CPU_ARCH_V6_M
8977 || attr_arch == TAG_CPU_ARCH_V6S_M
8978 || attr_profile == 'M'))
8979 is_m = 1;
8980 #endif
8983 if (fp_model == ARM_FLOAT_AUTO)
8985 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
8987 case 0:
8988 /* Leave it as "auto". Strictly speaking this case
8989 means FPA, but almost nobody uses that now, and
8990 many toolchains fail to set the appropriate bits
8991 for the floating-point model they use. */
8992 break;
8993 case EF_ARM_SOFT_FLOAT:
8994 fp_model = ARM_FLOAT_SOFT_FPA;
8995 break;
8996 case EF_ARM_VFP_FLOAT:
8997 fp_model = ARM_FLOAT_VFP;
8998 break;
8999 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9000 fp_model = ARM_FLOAT_SOFT_VFP;
9001 break;
9005 if (e_flags & EF_ARM_BE8)
9006 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9008 break;
9010 default:
9011 /* Leave it as "auto". */
9012 break;
9016 /* Check any target description for validity. */
9017 if (tdesc_has_registers (tdesc))
9019 /* For most registers we require GDB's default names; but also allow
9020 the numeric names for sp / lr / pc, as a convenience. */
9021 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9022 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9023 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9025 const struct tdesc_feature *feature;
9026 int valid_p;
9028 feature = tdesc_find_feature (tdesc,
9029 "org.gnu.gdb.arm.core");
9030 if (feature == NULL)
9032 feature = tdesc_find_feature (tdesc,
9033 "org.gnu.gdb.arm.m-profile");
9034 if (feature == NULL)
9035 return NULL;
9036 else
9037 is_m = 1;
9040 tdesc_data = tdesc_data_alloc ();
9042 valid_p = 1;
9043 for (i = 0; i < ARM_SP_REGNUM; i++)
9044 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9045 arm_register_names[i]);
9046 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9047 ARM_SP_REGNUM,
9048 arm_sp_names);
9049 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9050 ARM_LR_REGNUM,
9051 arm_lr_names);
9052 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9053 ARM_PC_REGNUM,
9054 arm_pc_names);
9055 if (is_m)
9056 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9057 ARM_PS_REGNUM, "xpsr");
9058 else
9059 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9060 ARM_PS_REGNUM, "cpsr");
9062 if (!valid_p)
9064 tdesc_data_cleanup (tdesc_data);
9065 return NULL;
9068 feature = tdesc_find_feature (tdesc,
9069 "org.gnu.gdb.arm.fpa");
9070 if (feature != NULL)
9072 valid_p = 1;
9073 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9074 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9075 arm_register_names[i]);
9076 if (!valid_p)
9078 tdesc_data_cleanup (tdesc_data);
9079 return NULL;
9082 else
9083 have_fpa_registers = 0;
9085 feature = tdesc_find_feature (tdesc,
9086 "org.gnu.gdb.xscale.iwmmxt");
9087 if (feature != NULL)
9089 static const char *const iwmmxt_names[] = {
9090 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9091 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9092 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9093 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9096 valid_p = 1;
9097 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9098 valid_p
9099 &= tdesc_numbered_register (feature, tdesc_data, i,
9100 iwmmxt_names[i - ARM_WR0_REGNUM]);
9102 /* Check for the control registers, but do not fail if they
9103 are missing. */
9104 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9105 tdesc_numbered_register (feature, tdesc_data, i,
9106 iwmmxt_names[i - ARM_WR0_REGNUM]);
9108 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9109 valid_p
9110 &= tdesc_numbered_register (feature, tdesc_data, i,
9111 iwmmxt_names[i - ARM_WR0_REGNUM]);
9113 if (!valid_p)
9115 tdesc_data_cleanup (tdesc_data);
9116 return NULL;
9119 have_wmmx_registers = 1;
9122 /* If we have a VFP unit, check whether the single precision registers
9123 are present. If not, then we will synthesize them as pseudo
9124 registers. */
9125 feature = tdesc_find_feature (tdesc,
9126 "org.gnu.gdb.arm.vfp");
9127 if (feature != NULL)
9129 static const char *const vfp_double_names[] = {
9130 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9131 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9132 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9133 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9136 /* Require the double precision registers. There must be either
9137 16 or 32. */
9138 valid_p = 1;
9139 for (i = 0; i < 32; i++)
9141 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9142 ARM_D0_REGNUM + i,
9143 vfp_double_names[i]);
9144 if (!valid_p)
9145 break;
9147 if (!valid_p && i == 16)
9148 valid_p = 1;
9150 /* Also require FPSCR. */
9151 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9152 ARM_FPSCR_REGNUM, "fpscr");
9153 if (!valid_p)
9155 tdesc_data_cleanup (tdesc_data);
9156 return NULL;
9159 if (tdesc_unnumbered_register (feature, "s0") == 0)
9160 have_vfp_pseudos = 1;
9162 vfp_register_count = i;
9164 /* If we have VFP, also check for NEON. The architecture allows
9165 NEON without VFP (integer vector operations only), but GDB
9166 does not support that. */
9167 feature = tdesc_find_feature (tdesc,
9168 "org.gnu.gdb.arm.neon");
9169 if (feature != NULL)
9171 /* NEON requires 32 double-precision registers. */
9172 if (i != 32)
9174 tdesc_data_cleanup (tdesc_data);
9175 return NULL;
9178 /* If there are quad registers defined by the stub, use
9179 their type; otherwise (normally) provide them with
9180 the default type. */
9181 if (tdesc_unnumbered_register (feature, "q0") == 0)
9182 have_neon_pseudos = 1;
9184 have_neon = 1;
9189 /* If there is already a candidate, use it. */
9190 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9191 best_arch != NULL;
9192 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9194 if (arm_abi != ARM_ABI_AUTO
9195 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9196 continue;
9198 if (fp_model != ARM_FLOAT_AUTO
9199 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9200 continue;
9202 /* There are various other properties in tdep that we do not
9203 need to check here: those derived from a target description,
9204 since gdbarches with a different target description are
9205 automatically disqualified. */
9207 /* Do check is_m, though, since it might come from the binary. */
9208 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9209 continue;
9211 /* Found a match. */
9212 break;
9215 if (best_arch != NULL)
9217 if (tdesc_data != NULL)
9218 tdesc_data_cleanup (tdesc_data);
9219 return best_arch->gdbarch;
9222 tdep = XCNEW (struct gdbarch_tdep);
9223 gdbarch = gdbarch_alloc (&info, tdep);
9225 /* Record additional information about the architecture we are defining.
9226 These are gdbarch discriminators, like the OSABI. */
9227 tdep->arm_abi = arm_abi;
9228 tdep->fp_model = fp_model;
9229 tdep->is_m = is_m;
9230 tdep->have_fpa_registers = have_fpa_registers;
9231 tdep->have_wmmx_registers = have_wmmx_registers;
9232 gdb_assert (vfp_register_count == 0
9233 || vfp_register_count == 16
9234 || vfp_register_count == 32);
9235 tdep->vfp_register_count = vfp_register_count;
9236 tdep->have_vfp_pseudos = have_vfp_pseudos;
9237 tdep->have_neon_pseudos = have_neon_pseudos;
9238 tdep->have_neon = have_neon;
9240 arm_register_g_packet_guesses (gdbarch);
9242 /* Breakpoints. */
9243 switch (info.byte_order_for_code)
9245 case BFD_ENDIAN_BIG:
9246 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9247 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9248 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9249 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9251 break;
9253 case BFD_ENDIAN_LITTLE:
9254 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9255 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9256 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9257 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9259 break;
9261 default:
9262 internal_error (__FILE__, __LINE__,
9263 _("arm_gdbarch_init: bad byte order for float format"));
9266 /* On ARM targets char defaults to unsigned. */
9267 set_gdbarch_char_signed (gdbarch, 0);
9269 /* wchar_t is unsigned under the AAPCS. */
9270 if (tdep->arm_abi == ARM_ABI_AAPCS)
9271 set_gdbarch_wchar_signed (gdbarch, 0);
9272 else
9273 set_gdbarch_wchar_signed (gdbarch, 1);
9275 /* Compute type alignment. */
9276 set_gdbarch_type_align (gdbarch, arm_type_align);
9278 /* Note: for displaced stepping, this includes the breakpoint, and one word
9279 of additional scratch space. This setting isn't used for anything besides
9280 displaced stepping at present. */
9281 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9283 /* This should be low enough for everything. */
9284 tdep->lowest_pc = 0x20;
9285 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9287 /* The default, for both APCS and AAPCS, is to return small
9288 structures in registers. */
9289 tdep->struct_return = reg_struct_return;
9291 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9292 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9294 if (is_m)
9295 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9297 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9299 frame_base_set_default (gdbarch, &arm_normal_base);
9301 /* Address manipulation. */
9302 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9304 /* Advance PC across function entry code. */
9305 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9307 /* Detect whether PC is at a point where the stack has been destroyed. */
9308 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9310 /* Skip trampolines. */
9311 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9313 /* The stack grows downward. */
9314 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9316 /* Breakpoint manipulation. */
9317 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9318 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9319 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9320 arm_breakpoint_kind_from_current_state);
9322 /* Information about registers, etc. */
9323 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9324 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9325 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9326 set_gdbarch_register_type (gdbarch, arm_register_type);
9327 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9329 /* This "info float" is FPA-specific. Use the generic version if we
9330 do not have FPA. */
9331 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9332 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9334 /* Internal <-> external register number maps. */
9335 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9336 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9338 set_gdbarch_register_name (gdbarch, arm_register_name);
9340 /* Returning results. */
9341 set_gdbarch_return_value (gdbarch, arm_return_value);
9343 /* Disassembly. */
9344 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9346 /* Minsymbol frobbing. */
9347 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9348 set_gdbarch_coff_make_msymbol_special (gdbarch,
9349 arm_coff_make_msymbol_special);
9350 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9352 /* Thumb-2 IT block support. */
9353 set_gdbarch_adjust_breakpoint_address (gdbarch,
9354 arm_adjust_breakpoint_address);
9356 /* Virtual tables. */
9357 set_gdbarch_vbit_in_delta (gdbarch, 1);
9359 /* Hook in the ABI-specific overrides, if they have been registered. */
9360 gdbarch_init_osabi (info, gdbarch);
9362 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9364 /* Add some default predicates. */
9365 if (is_m)
9366 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9367 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9368 dwarf2_append_unwinders (gdbarch);
9369 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9370 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9371 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9373 /* Now we have tuned the configuration, set a few final things,
9374 based on what the OS ABI has told us. */
9376 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9377 binaries are always marked. */
9378 if (tdep->arm_abi == ARM_ABI_AUTO)
9379 tdep->arm_abi = ARM_ABI_APCS;
9381 /* Watchpoints are not steppable. */
9382 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9384 /* We used to default to FPA for generic ARM, but almost nobody
9385 uses that now, and we now provide a way for the user to force
9386 the model. So default to the most useful variant. */
9387 if (tdep->fp_model == ARM_FLOAT_AUTO)
9388 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9390 if (tdep->jb_pc >= 0)
9391 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9393 /* Floating point sizes and format. */
9394 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9395 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9397 set_gdbarch_double_format
9398 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9399 set_gdbarch_long_double_format
9400 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9402 else
9404 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9405 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9408 if (have_vfp_pseudos)
9410 /* NOTE: These are the only pseudo registers used by
9411 the ARM target at the moment. If more are added, a
9412 little more care in numbering will be needed. */
9414 int num_pseudos = 32;
9415 if (have_neon_pseudos)
9416 num_pseudos += 16;
9417 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9418 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9419 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9422 if (tdesc_data)
9424 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9426 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9428 /* Override tdesc_register_type to adjust the types of VFP
9429 registers for NEON. */
9430 set_gdbarch_register_type (gdbarch, arm_register_type);
9433 /* Add standard register aliases. We add aliases even for those
9434 names which are used by the current architecture - it's simpler,
9435 and does no harm, since nothing ever lists user registers. */
9436 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9437 user_reg_add (gdbarch, arm_register_aliases[i].name,
9438 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9440 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9441 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9443 return gdbarch;
9446 static void
9447 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9449 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9451 if (tdep == NULL)
9452 return;
9454 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9455 (unsigned long) tdep->lowest_pc);
9458 #if GDB_SELF_TEST
9459 namespace selftests
9461 static void arm_record_test (void);
9463 #endif
9465 void
9466 _initialize_arm_tdep (void)
9468 long length;
9469 int i, j;
9470 char regdesc[1024], *rdptr = regdesc;
9471 size_t rest = sizeof (regdesc);
9473 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9475 arm_objfile_data_key
9476 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9478 /* Add ourselves to objfile event chain. */
9479 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9480 arm_exidx_data_key
9481 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9483 /* Register an ELF OS ABI sniffer for ARM binaries. */
9484 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9485 bfd_target_elf_flavour,
9486 arm_elf_osabi_sniffer);
9488 /* Initialize the standard target descriptions. */
9489 initialize_tdesc_arm_with_m ();
9490 initialize_tdesc_arm_with_m_fpa_layout ();
9491 initialize_tdesc_arm_with_m_vfp_d16 ();
9492 initialize_tdesc_arm_with_iwmmxt ();
9493 initialize_tdesc_arm_with_vfpv2 ();
9494 initialize_tdesc_arm_with_vfpv3 ();
9495 initialize_tdesc_arm_with_neon ();
9497 /* Add root prefix command for all "set arm"/"show arm" commands. */
9498 add_prefix_cmd ("arm", no_class, set_arm_command,
9499 _("Various ARM-specific commands."),
9500 &setarmcmdlist, "set arm ", 0, &setlist);
9502 add_prefix_cmd ("arm", no_class, show_arm_command,
9503 _("Various ARM-specific commands."),
9504 &showarmcmdlist, "show arm ", 0, &showlist);
9507 arm_disassembler_options = xstrdup ("reg-names-std");
9508 const disasm_options_t *disasm_options
9509 = &disassembler_options_arm ()->options;
9510 int num_disassembly_styles = 0;
9511 for (i = 0; disasm_options->name[i] != NULL; i++)
9512 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9513 num_disassembly_styles++;
9515 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9516 valid_disassembly_styles = XNEWVEC (const char *,
9517 num_disassembly_styles + 1);
9518 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9519 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9521 size_t offset = strlen ("reg-names-");
9522 const char *style = disasm_options->name[i];
9523 valid_disassembly_styles[j++] = &style[offset];
9524 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9525 disasm_options->description[i]);
9526 rdptr += length;
9527 rest -= length;
9529 /* Mark the end of valid options. */
9530 valid_disassembly_styles[num_disassembly_styles] = NULL;
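/* E.g., the disassembler option "reg-names-raw" is exposed here as the
style "raw", and its description line is appended to REGDESC for the
help text built below. */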
9532 /* Create the help text. */
9533 std::string helptext = string_printf ("%s%s%s",
9534 _("The valid values are:\n"),
9535 regdesc,
9536 _("The default is \"std\"."));
9538 add_setshow_enum_cmd ("disassembler", no_class,
9539 valid_disassembly_styles, &disassembly_style,
9540 _("Set the disassembly style."),
9541 _("Show the disassembly style."),
9542 helptext.c_str (),
9543 set_disassembly_style_sfunc,
9544 show_disassembly_style_sfunc,
9545 &setarmcmdlist, &showarmcmdlist);
9547 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9548 _("Set usage of ARM 32-bit mode."),
9549 _("Show usage of ARM 32-bit mode."),
9550 _("When off, a 26-bit PC will be used."),
9551 NULL,
9552 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9553 mode is %s. */
9554 &setarmcmdlist, &showarmcmdlist);
9556 /* Add a command to allow the user to force the FPU model. */
9557 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9558 _("Set the floating point type."),
9559 _("Show the floating point type."),
9560 _("auto - Determine the FP typefrom the OS-ABI.\n\
9561 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9562 fpa - FPA co-processor (GCC compiled).\n\
9563 softvfp - Software FP with pure-endian doubles.\n\
9564 vfp - VFP co-processor."),
9565 set_fp_model_sfunc, show_fp_model,
9566 &setarmcmdlist, &showarmcmdlist);
9568 /* Add a command to allow the user to force the ABI. */
9569 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9570 _("Set the ABI."),
9571 _("Show the ABI."),
9572 NULL, arm_set_abi, arm_show_abi,
9573 &setarmcmdlist, &showarmcmdlist);
9575 /* Add two commands to allow the user to force the assumed
9576 execution mode. */
9577 add_setshow_enum_cmd ("fallback-mode", class_support,
9578 arm_mode_strings, &arm_fallback_mode_string,
9579 _("Set the mode assumed when symbols are unavailable."),
9580 _("Show the mode assumed when symbols are unavailable."),
9581 NULL, NULL, arm_show_fallback_mode,
9582 &setarmcmdlist, &showarmcmdlist);
9583 add_setshow_enum_cmd ("force-mode", class_support,
9584 arm_mode_strings, &arm_force_mode_string,
9585 _("Set the mode assumed even when symbols are available."),
9586 _("Show the mode assumed even when symbols are available."),
9587 NULL, NULL, arm_show_force_mode,
9588 &setarmcmdlist, &showarmcmdlist);
9590 /* Debugging flag. */
9591 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9592 _("Set ARM debugging."),
9593 _("Show ARM debugging."),
9594 _("When on, arm-specific debugging is enabled."),
9595 NULL,
9596 NULL, /* FIXME: i18n: "ARM debugging is %s."  */
9597 &setdebuglist, &showdebuglist);
9599 #if GDB_SELF_TEST
9600 selftests::register_test ("arm-record", selftests::arm_record_test);
9601 #endif
9605 /* ARM-reversible process record data structures. */
9607 #define ARM_INSN_SIZE_BYTES 4
9608 #define THUMB_INSN_SIZE_BYTES 2
9609 #define THUMB2_INSN_SIZE_BYTES 4
9612 /* Position of the bit within a 32-bit ARM instruction
9613 that defines whether the instruction is a load or store. */
9614 #define INSN_S_L_BIT_NUM 20
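/* For example, in the single data transfer and block transfer encodings
bit 20 is the L bit: 1 for loads (LDR/LDM) and 0 for stores (STR/STM),
so bit (insn, INSN_S_L_BIT_NUM) distinguishes loads from stores. */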
9616 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9617 do \
9619 unsigned int reg_len = LENGTH; \
9620 if (reg_len) \
9622 REGS = XNEWVEC (uint32_t, reg_len); \
9623 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9626 while (0)
9628 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9629 do \
9631 unsigned int mem_len = LENGTH; \
9632 if (mem_len) \
9634 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9635 memcpy(&MEMS->len, &RECORD_BUF[0], \
9636 sizeof(struct arm_mem_r) * LENGTH); \
9639 while (0)
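/* Typical usage in a record handler (illustrative sketch, mirroring
arm_record_b_bl below):

uint32_t record_buf[8];
record_buf[0] = ARM_LR_REGNUM;
arm_insn_r->reg_rec_count = 1;
REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); */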
9641 /* Checks whether the insn has already been recorded, i.e. whether it has any register or memory records (boolean expression). */
9642 #define INSN_RECORDED(ARM_RECORD) \
9643 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9645 /* ARM memory record structure. */
9646 struct arm_mem_r
9648 uint32_t len; /* Record length. */
9649 uint32_t addr; /* Memory address. */
9652 /* An ARM instruction record contains the opcode and execution
9653 state of the current insn (before entry to decode_insn ()),
9654 and the list of to-be-modified registers and
9655 memory blocks (on return from decode_insn ()). */
9657 typedef struct insn_decode_record_t
9659 struct gdbarch *gdbarch;
9660 struct regcache *regcache;
9661 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9662 uint32_t arm_insn; /* Should accommodate thumb. */
9663 uint32_t cond; /* Condition code. */
9664 uint32_t opcode; /* Insn opcode. */
9665 uint32_t decode; /* Insn decode bits. */
9666 uint32_t mem_rec_count; /* Number of memory records. */
9667 uint32_t reg_rec_count; /* Number of register records. */
9668 uint32_t *arm_regs; /* Registers to be saved for this record. */
9669 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9670 } insn_decode_record;
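/* For example, after decoding a BL insn the record holds one register
entry (ARM_LR_REGNUM) and no memory entries, whereas a STR insn yields
one memory entry (length and target address) plus the base register
when write-back is used. */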
9673 /* Checks ARM SBZ and SBO mandatory fields. */
9675 static int
9676 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9678 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
9680 if (!len)
9681 return 1;
9683 if (!sbo)
9684 ones = ~ones;
9686 while (ones)
9688 if (!(ones & sbo))
9690 return 0;
9692 ones = ones >> 1;
9694 return 1;
9697 enum arm_record_result
9699 ARM_RECORD_SUCCESS = 0,
9700 ARM_RECORD_FAILURE = 1
9703 typedef enum
9705 ARM_RECORD_STRH=1,
9706 ARM_RECORD_STRD
9707 } arm_record_strx_t;
9709 typedef enum
9711 ARM_RECORD=1,
9712 THUMB_RECORD,
9713 THUMB2_RECORD
9714 } record_type_t;
9717 static int
9718 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9719 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9722 struct regcache *reg_cache = arm_insn_r->regcache;
9723 ULONGEST u_regval[2] = {0};
9725 uint32_t reg_src1 = 0, reg_src2 = 0;
9726 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
9728 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9729 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9731 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9733 /* 1) Handle misc store, immediate offset. */
9734 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9735 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9736 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9737 regcache_raw_read_unsigned (reg_cache, reg_src1,
9738 &u_regval[0]);
9739 if (ARM_PC_REGNUM == reg_src1)
9741 /* If R15 was used as Rn, the value read is the current PC + 8. */
9742 u_regval[0] = u_regval[0] + 8;
9744 offset_8 = (immed_high << 4) | immed_low;
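/* E.g., immed_high == 0x3 and immed_low == 0x4 give the 8-bit immediate
offset 0x34. */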
9745 /* Calculate target store address. */
9746 if (14 == arm_insn_r->opcode)
9748 tgt_mem_addr = u_regval[0] + offset_8;
9750 else
9752 tgt_mem_addr = u_regval[0] - offset_8;
9754 if (ARM_RECORD_STRH == str_type)
9756 record_buf_mem[0] = 2;
9757 record_buf_mem[1] = tgt_mem_addr;
9758 arm_insn_r->mem_rec_count = 1;
9760 else if (ARM_RECORD_STRD == str_type)
9762 record_buf_mem[0] = 4;
9763 record_buf_mem[1] = tgt_mem_addr;
9764 record_buf_mem[2] = 4;
9765 record_buf_mem[3] = tgt_mem_addr + 4;
9766 arm_insn_r->mem_rec_count = 2;
9769 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9771 /* 2) Store, register offset. */
9772 /* Get Rm. */
9773 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9774 /* Get Rn. */
9775 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9776 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9777 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9778 if (15 == reg_src2)
9780 /* If R15 was used as Rn, the value read is the current PC + 8. */
9781 u_regval[0] = u_regval[0] + 8;
9783 /* Calculate target store address, Rn +/- Rm, register offset. */
9784 if (12 == arm_insn_r->opcode)
9786 tgt_mem_addr = u_regval[0] + u_regval[1];
9788 else
9790 tgt_mem_addr = u_regval[1] - u_regval[0];
9792 if (ARM_RECORD_STRH == str_type)
9794 record_buf_mem[0] = 2;
9795 record_buf_mem[1] = tgt_mem_addr;
9796 arm_insn_r->mem_rec_count = 1;
9798 else if (ARM_RECORD_STRD == str_type)
9800 record_buf_mem[0] = 4;
9801 record_buf_mem[1] = tgt_mem_addr;
9802 record_buf_mem[2] = 4;
9803 record_buf_mem[3] = tgt_mem_addr + 4;
9804 arm_insn_r->mem_rec_count = 2;
9807 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9808 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9810 /* 3) Store, immediate pre-indexed. */
9811 /* 5) Store, immediate post-indexed. */
9812 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9813 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9814 offset_8 = (immed_high << 4) | immed_low;
9815 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9816 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9817 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9818 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9820 tgt_mem_addr = u_regval[0] + offset_8;
9822 else
9824 tgt_mem_addr = u_regval[0] - offset_8;
9826 if (ARM_RECORD_STRH == str_type)
9828 record_buf_mem[0] = 2;
9829 record_buf_mem[1] = tgt_mem_addr;
9830 arm_insn_r->mem_rec_count = 1;
9832 else if (ARM_RECORD_STRD == str_type)
9834 record_buf_mem[0] = 4;
9835 record_buf_mem[1] = tgt_mem_addr;
9836 record_buf_mem[2] = 4;
9837 record_buf_mem[3] = tgt_mem_addr + 4;
9838 arm_insn_r->mem_rec_count = 2;
9840 /* Record Rn also as it changes. */
9841 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9842 arm_insn_r->reg_rec_count = 1;
9844 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9845 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9847 /* 4) Store, register pre-indexed. */
9848 /* 6) Store, register post-indexed. */
9849 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9850 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9851 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9852 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9853 /* Calculate target store address, Rn +/- Rm, register offset. */
9854 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9856 tgt_mem_addr = u_regval[0] + u_regval[1];
9858 else
9860 tgt_mem_addr = u_regval[1] - u_regval[0];
9862 if (ARM_RECORD_STRH == str_type)
9864 record_buf_mem[0] = 2;
9865 record_buf_mem[1] = tgt_mem_addr;
9866 arm_insn_r->mem_rec_count = 1;
9868 else if (ARM_RECORD_STRD == str_type)
9870 record_buf_mem[0] = 4;
9871 record_buf_mem[1] = tgt_mem_addr;
9872 record_buf_mem[2] = 4;
9873 record_buf_mem[3] = tgt_mem_addr + 4;
9874 arm_insn_r->mem_rec_count = 2;
9876 /* Record Rn also as it changes. */
9877 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9878 arm_insn_r->reg_rec_count = 1;
9880 return 0;
9883 /* Handling ARM extension space insns. */
9885 static int
9886 arm_record_extension_space (insn_decode_record *arm_insn_r)
9888 int ret = 0; /* Return value: -1: record failure; 0: success. */
9889 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9890 uint32_t record_buf[8], record_buf_mem[8];
9891 uint32_t reg_src1 = 0;
9892 struct regcache *reg_cache = arm_insn_r->regcache;
9893 ULONGEST u_regval = 0;
9895 gdb_assert (!INSN_RECORDED(arm_insn_r));
9896 /* Handle unconditional insn extension space. */
9898 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9899 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9900 if (arm_insn_r->cond)
9902 /* PLD has no effect on architectural state, it just affects
9903 the caches. */
9904 if (5 == ((opcode1 & 0xE0) >> 5))
9906 /* BLX(1) */
9907 record_buf[0] = ARM_PS_REGNUM;
9908 record_buf[1] = ARM_LR_REGNUM;
9909 arm_insn_r->reg_rec_count = 2;
9911 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9915 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9916 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9918 ret = -1;
9919 /* Undefined instruction on ARM V5; need to handle if later
9920 versions define it. */
9923 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9924 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9925 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9927 /* Handle arithmetic insn extension space. */
9928 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9929 && !INSN_RECORDED(arm_insn_r))
9931 /* Handle MLA(S) and MUL(S). */
9932 if (in_inclusive_range (insn_op1, 0U, 3U))
9934 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9935 record_buf[1] = ARM_PS_REGNUM;
9936 arm_insn_r->reg_rec_count = 2;
9938 else if (in_inclusive_range (insn_op1, 4U, 15U))
9940 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9941 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9942 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9943 record_buf[2] = ARM_PS_REGNUM;
9944 arm_insn_r->reg_rec_count = 3;
9948 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9949 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9950 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9952 /* Handle control insn extension space. */
9954 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9955 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9957 if (!bit (arm_insn_r->arm_insn,25))
9959 if (!bits (arm_insn_r->arm_insn, 4, 7))
9961 if ((0 == insn_op1) || (2 == insn_op1))
9963 /* MRS. */
9964 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9965 arm_insn_r->reg_rec_count = 1;
9967 else if (1 == insn_op1)
9969 /* CPSR is going to be changed. */
9970 record_buf[0] = ARM_PS_REGNUM;
9971 arm_insn_r->reg_rec_count = 1;
9973 else if (3 == insn_op1)
9975 /* SPSR is going to be changed. */
9976 /* We need to get SPSR value, which is yet to be done. */
9977 return -1;
9980 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
9982 if (1 == insn_op1)
9984 /* BX. */
9985 record_buf[0] = ARM_PS_REGNUM;
9986 arm_insn_r->reg_rec_count = 1;
9988 else if (3 == insn_op1)
9990 /* CLZ. */
9991 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9992 arm_insn_r->reg_rec_count = 1;
9995 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
9997 /* BLX. */
9998 record_buf[0] = ARM_PS_REGNUM;
9999 record_buf[1] = ARM_LR_REGNUM;
10000 arm_insn_r->reg_rec_count = 2;
10002 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10004 /* QADD, QSUB, QDADD, QDSUB */
10005 record_buf[0] = ARM_PS_REGNUM;
10006 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10007 arm_insn_r->reg_rec_count = 2;
10009 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10011 /* BKPT. */
10012 record_buf[0] = ARM_PS_REGNUM;
10013 record_buf[1] = ARM_LR_REGNUM;
10014 arm_insn_r->reg_rec_count = 2;
10016 /* Save SPSR also; how? */
10017 return -1;
10019 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10020 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10021 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10022 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10025 if (0 == insn_op1 || 1 == insn_op1)
10027 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10028 /* We don't do optimization for SMULW<y> where we
10029 need only Rd. */
10030 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10031 record_buf[1] = ARM_PS_REGNUM;
10032 arm_insn_r->reg_rec_count = 2;
10034 else if (2 == insn_op1)
10036 /* SMLAL<x><y>. */
10037 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10038 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10039 arm_insn_r->reg_rec_count = 2;
10041 else if (3 == insn_op1)
10043 /* SMUL<x><y>. */
10044 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10045 arm_insn_r->reg_rec_count = 1;
10049 else
10051 /* MSR: immediate form. */
10052 if (1 == insn_op1)
10054 /* CPSR is going to be changed. */
10055 record_buf[0] = ARM_PS_REGNUM;
10056 arm_insn_r->reg_rec_count = 1;
10058 else if (3 == insn_op1)
10060 /* SPSR is going to be changed. */
10061 /* We need to get the SPSR value, which is yet to be done. */
10062 return -1;
10067 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10068 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10069 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10071 /* Handle load/store insn extension space. */
10073 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10074 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10075 && !INSN_RECORDED(arm_insn_r))
10077 /* SWP/SWPB. */
10078 if (0 == insn_op1)
10080 /* This insn changes a register and memory as well. */
10081 /* SWP or SWPB insn. */
10082 /* Get memory address given by Rn. */
10083 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10084 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10085 /* SWP insn? It swaps a word. */
10086 if (8 == arm_insn_r->opcode)
10088 record_buf_mem[0] = 4;
10090 else
10092 /* SWPB insn swaps only a byte. */
10093 record_buf_mem[0] = 1;
10095 record_buf_mem[1] = u_regval;
10096 arm_insn_r->mem_rec_count = 1;
10097 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10098 arm_insn_r->reg_rec_count = 1;
10100 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10102 /* STRH. */
10103 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10104 ARM_RECORD_STRH);
10106 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10108 /* LDRD. */
10109 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10110 record_buf[1] = record_buf[0] + 1;
10111 arm_insn_r->reg_rec_count = 2;
10113 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10115 /* STRD. */
10116 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10117 ARM_RECORD_STRD);
10119 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10121 /* LDRH, LDRSB, LDRSH. */
10122 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10123 arm_insn_r->reg_rec_count = 1;
10128 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10129 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10130 && !INSN_RECORDED(arm_insn_r))
10132 ret = -1;
10133 /* Handle coprocessor insn extension space. */
10136 /* To be done for ARMv5 and later; as of now we return -1. */
10137 if (-1 == ret)
10138 return ret;
10140 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10141 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10143 return ret;
10146 /* Handling opcode 000 insns. */
10148 static int
10149 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10151 struct regcache *reg_cache = arm_insn_r->regcache;
10152 uint32_t record_buf[8], record_buf_mem[8];
10153 ULONGEST u_regval[2] = {0};
10155 uint32_t reg_src1 = 0;
10156 uint32_t opcode1 = 0;
10158 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10159 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10160 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10162 if (!((opcode1 & 0x19) == 0x10))
10164 /* Data-processing (register) and Data-processing (register-shifted
10165 register). */
10166 /* In all 11 shifter operand modes, the insn modifies the destination
10167 register, which is specified by bits 12-15 of the insn. */
10168 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10169 record_buf[1] = ARM_PS_REGNUM;
10170 arm_insn_r->reg_rec_count = 2;
10172 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10174 /* Miscellaneous instructions */
10176 if (3 == arm_insn_r->decode && 0x12 == opcode1
10177 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10179 /* Handle BLX, branch and link/exchange. */
10180 if (9 == arm_insn_r->opcode)
10182 /* Branch mode is chosen by setting the T bit of CPSR from bit[0] of Rm,
10183 and R14 stores the return address. */
10184 record_buf[0] = ARM_PS_REGNUM;
10185 record_buf[1] = ARM_LR_REGNUM;
10186 arm_insn_r->reg_rec_count = 2;
10189 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10191 /* Handle enhanced software breakpoint insn, BKPT. */
10192 /* CPSR is changed so execution continues in ARM state, with normal
10193 interrupts disabled, entering abort mode. */
10194 /* The PC is set according to the high vector configuration. */
10195 /* If the user hits a breakpoint and then executes in reverse, we
10196 need to go back with the previous CPSR and
10197 Program Counter. */
10198 record_buf[0] = ARM_PS_REGNUM;
10199 record_buf[1] = ARM_LR_REGNUM;
10200 arm_insn_r->reg_rec_count = 2;
10202 /* Save SPSR also; how? */
10203 return -1;
10205 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10206 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10208 /* Handle BX, branch and exchange. */
10209 /* Branch mode is chosen by setting the T bit of CPSR from bit[0] of Rm. */
10210 record_buf[0] = ARM_PS_REGNUM;
10211 arm_insn_r->reg_rec_count = 1;
10213 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10214 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10215 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10217 /* Count leading zeros: CLZ. */
10218 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10219 arm_insn_r->reg_rec_count = 1;
10221 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10222 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10223 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10224 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10226 /* Handle MRS insn. */
10227 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10228 arm_insn_r->reg_rec_count = 1;
10231 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10233 /* Multiply and multiply-accumulate */
10235 /* Handle multiply instructions. */
10236 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10237 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10239 /* Handle MLA and MUL. */
10240 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10241 record_buf[1] = ARM_PS_REGNUM;
10242 arm_insn_r->reg_rec_count = 2;
10244 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10246 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10247 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10248 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10249 record_buf[2] = ARM_PS_REGNUM;
10250 arm_insn_r->reg_rec_count = 3;
10253 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10255 /* Synchronization primitives */
10257 /* Handling SWP, SWPB. */
10258 /* This insn changes a register and memory as well. */
10259 /* SWP or SWPB insn. */
10261 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10262 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10263 /* SWP insn? It swaps a word. */
10264 if (8 == arm_insn_r->opcode)
10266 record_buf_mem[0] = 4;
10268 else
10271 /* SWPB insn swaps only a byte. */
10271 record_buf_mem[0] = 1;
10273 record_buf_mem[1] = u_regval[0];
10274 arm_insn_r->mem_rec_count = 1;
10275 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10276 arm_insn_r->reg_rec_count = 1;
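/* E.g., SWP r0, r1, [r2] overwrites r0 and the word at the address held
in r2, so one register record and one 4-byte memory record are made. */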
10278 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10279 || 15 == arm_insn_r->decode)
10281 if ((opcode1 & 0x12) == 2)
10283 /* Extra load/store (unprivileged) */
10284 return -1;
10286 else
10288 /* Extra load/store */
10289 switch (bits (arm_insn_r->arm_insn, 5, 6))
10291 case 1:
10292 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10294 /* STRH (register), STRH (immediate) */
10295 arm_record_strx (arm_insn_r, &record_buf[0],
10296 &record_buf_mem[0], ARM_RECORD_STRH);
10298 else if ((opcode1 & 0x05) == 0x1)
10300 /* LDRH (register) */
10301 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10302 arm_insn_r->reg_rec_count = 1;
10304 if (bit (arm_insn_r->arm_insn, 21))
10306 /* Write back to Rn. */
10307 record_buf[arm_insn_r->reg_rec_count++]
10308 = bits (arm_insn_r->arm_insn, 16, 19);
10311 else if ((opcode1 & 0x05) == 0x5)
10313 /* LDRH (immediate), LDRH (literal) */
10314 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10316 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10317 arm_insn_r->reg_rec_count = 1;
10319 if (rn != 15)
10321 /* LDRH (immediate) */
10322 if (bit (arm_insn_r->arm_insn, 21))
10324 /* Write back to Rn. */
10325 record_buf[arm_insn_r->reg_rec_count++] = rn;
10329 else
10330 return -1;
10331 break;
10332 case 2:
10333 if ((opcode1 & 0x05) == 0x0)
10335 /* LDRD (register) */
10336 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10337 record_buf[1] = record_buf[0] + 1;
10338 arm_insn_r->reg_rec_count = 2;
10340 if (bit (arm_insn_r->arm_insn, 21))
10342 /* Write back to Rn. */
10343 record_buf[arm_insn_r->reg_rec_count++]
10344 = bits (arm_insn_r->arm_insn, 16, 19);
10347 else if ((opcode1 & 0x05) == 0x1)
10349 /* LDRSB (register) */
10350 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10351 arm_insn_r->reg_rec_count = 1;
10353 if (bit (arm_insn_r->arm_insn, 21))
10355 /* Write back to Rn. */
10356 record_buf[arm_insn_r->reg_rec_count++]
10357 = bits (arm_insn_r->arm_insn, 16, 19);
10360 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10362 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10363 LDRSB (literal) */
10364 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10366 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10367 arm_insn_r->reg_rec_count = 1;
10369 if (rn != 15)
10371 /* LDRD (immediate), LDRSB (immediate) */
10372 if (bit (arm_insn_r->arm_insn, 21))
10374 /* Write back to Rn. */
10375 record_buf[arm_insn_r->reg_rec_count++] = rn;
10379 else
10380 return -1;
10381 break;
10382 case 3:
10383 if ((opcode1 & 0x05) == 0x0)
10385 /* STRD (register) */
10386 arm_record_strx (arm_insn_r, &record_buf[0],
10387 &record_buf_mem[0], ARM_RECORD_STRD);
10389 else if ((opcode1 & 0x05) == 0x1)
10391 /* LDRSH (register) */
10392 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10393 arm_insn_r->reg_rec_count = 1;
10395 if (bit (arm_insn_r->arm_insn, 21))
10397 /* Write back to Rn. */
10398 record_buf[arm_insn_r->reg_rec_count++]
10399 = bits (arm_insn_r->arm_insn, 16, 19);
10402 else if ((opcode1 & 0x05) == 0x4)
10404 /* STRD (immediate) */
10405 arm_record_strx (arm_insn_r, &record_buf[0],
10406 &record_buf_mem[0], ARM_RECORD_STRD);
10408 else if ((opcode1 & 0x05) == 0x5)
10410 /* LDRSH (immediate), LDRSH (literal) */
10411 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10412 arm_insn_r->reg_rec_count = 1;
10414 if (bit (arm_insn_r->arm_insn, 21))
10416 /* Write back to Rn. */
10417 record_buf[arm_insn_r->reg_rec_count++]
10418 = bits (arm_insn_r->arm_insn, 16, 19);
10421 else
10422 return -1;
10423 break;
10424 default:
10425 return -1;
10429 else
10431 return -1;
10434 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10435 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10436 return 0;
10439 /* Handling opcode 001 insns. */
10441 static int
10442 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10444 uint32_t record_buf[8], record_buf_mem[8];
10446 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10447 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10449 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10450 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10451 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10454 /* Handle MSR insn. */
10455 if (9 == arm_insn_r->opcode)
10457 /* CPSR is going to be changed. */
10458 record_buf[0] = ARM_PS_REGNUM;
10459 arm_insn_r->reg_rec_count = 1;
10461 else
10463 /* SPSR is going to be changed. */
10466 else if (arm_insn_r->opcode <= 15)
10468 /* Normal data processing insns. */
10469 /* In all 11 shifter operand modes, the insn modifies the destination
10470 register, which is specified by bits 12-15 of the insn. */
10471 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10472 record_buf[1] = ARM_PS_REGNUM;
10473 arm_insn_r->reg_rec_count = 2;
10475 else
10477 return -1;
10480 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10481 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10482 return 0;
10485 static int
10486 arm_record_media (insn_decode_record *arm_insn_r)
10488 uint32_t record_buf[8];
10490 switch (bits (arm_insn_r->arm_insn, 22, 24))
10492 case 0:
10493 /* Parallel addition and subtraction, signed */
10494 case 1:
10495 /* Parallel addition and subtraction, unsigned */
10496 case 2:
10497 case 3:
10498 /* Packing, unpacking, saturation and reversal */
10500 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10502 record_buf[arm_insn_r->reg_rec_count++] = rd;
10504 break;
10506 case 4:
10507 case 5:
10508 /* Signed multiplies */
10510 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10511 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10513 record_buf[arm_insn_r->reg_rec_count++] = rd;
10514 if (op1 == 0x0)
10515 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10516 else if (op1 == 0x4)
10517 record_buf[arm_insn_r->reg_rec_count++]
10518 = bits (arm_insn_r->arm_insn, 12, 15);
10520 break;
10522 case 6:
10524 if (bit (arm_insn_r->arm_insn, 21)
10525 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10527 /* SBFX */
10528 record_buf[arm_insn_r->reg_rec_count++]
10529 = bits (arm_insn_r->arm_insn, 12, 15);
10531 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10532 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10534 /* USAD8 and USADA8 */
10535 record_buf[arm_insn_r->reg_rec_count++]
10536 = bits (arm_insn_r->arm_insn, 16, 19);
10539 break;
10541 case 7:
10543 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10544 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10546 /* Permanently UNDEFINED */
10547 return -1;
10549 else
10551 /* BFC, BFI and UBFX */
10552 record_buf[arm_insn_r->reg_rec_count++]
10553 = bits (arm_insn_r->arm_insn, 12, 15);
10556 break;
10558 default:
10559 return -1;
10562 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10564 return 0;
10567 /* Handle ARM mode instructions with opcode 010. */
10569 static int
10570 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10572 struct regcache *reg_cache = arm_insn_r->regcache;
10574 uint32_t reg_base, reg_dest;
10575 uint32_t offset_12, tgt_mem_addr;
10576 uint32_t record_buf[8], record_buf_mem[8];
10577 unsigned char wback;
10578 ULONGEST u_regval;
10580 /* Calculate wback. */
10581 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10582 || (bit (arm_insn_r->arm_insn, 21) == 1);
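/* Bit 24 is the P (pre-index) bit and bit 21 the W (write-back) bit;
both the post-indexed form (P == 0) and the pre-indexed form with
write-back (P == 1, W == 1) update the base register. */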
10584 arm_insn_r->reg_rec_count = 0;
10585 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10587 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10589 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10590 and LDRT. */
10592 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10593 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10595 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10596 precedes an LDR instruction having R15 as reg_dest, it
10597 emulates a branch and link instruction, and hence we need to save
10598 CPSR and PC as well. */
10599 if (ARM_PC_REGNUM == reg_dest)
10600 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10602 /* If wback is true, also save the base register, which is going to be
10603 written to. */
10604 if (wback)
10605 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10607 else
10609 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10611 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10612 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10614 /* Handle bit U. */
10615 if (bit (arm_insn_r->arm_insn, 23))
10617 /* U == 1: Add the offset. */
10618 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10620 else
10622 /* U == 0: subtract the offset. */
10623 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10626 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10627 bytes. */
10628 if (bit (arm_insn_r->arm_insn, 22))
10630 /* STRB and STRBT: 1 byte. */
10631 record_buf_mem[0] = 1;
10633 else
10635 /* STR and STRT: 4 bytes. */
10636 record_buf_mem[0] = 4;
10639 /* Handle bit P. */
10640 if (bit (arm_insn_r->arm_insn, 24))
10641 record_buf_mem[1] = tgt_mem_addr;
10642 else
10643 record_buf_mem[1] = (uint32_t) u_regval;
10645 arm_insn_r->mem_rec_count = 1;
10647 /* If wback is true, also save the base register, which is going to be
10648 written to. */
10649 if (wback)
10650 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10653 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10654 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10655 return 0;
10658 /* Handling opcode 011 insns. */
10660 static int
10661 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10663 struct regcache *reg_cache = arm_insn_r->regcache;
10665 uint32_t shift_imm = 0;
10666 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10667 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10668 uint32_t record_buf[8], record_buf_mem[8];
10670 LONGEST s_word;
10671 ULONGEST u_regval[2];
10673 if (bit (arm_insn_r->arm_insn, 4))
10674 return arm_record_media (arm_insn_r);
10676 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10677 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10679 /* Handle enhanced store insns and the LDRD DSP insn;
10680 the ordering follows the addressing modes of the store insns,
10681 starting with the STRH insn. */
10683 /* LDR or STR? */
10684 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10686 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10687 /* The LDR insn is capable of doing branching. If
10688 MOV LR, PC precedes an LDR insn having R15 as Rd,
10689 it emulates a branch and link insn, and hence we
10690 need to save CPSR and PC as well. */
10691 if (15 != reg_dest)
10693 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10694 arm_insn_r->reg_rec_count = 1;
10696 else
10698 record_buf[0] = reg_dest;
10699 record_buf[1] = ARM_PS_REGNUM;
10700 arm_insn_r->reg_rec_count = 2;
10703 else
10705 if (! bits (arm_insn_r->arm_insn, 4, 11))
10707 /* Store insn, register offset and register pre-indexed,
10708 register post-indexed. */
10709 /* Get Rm. */
10710 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10711 /* Get Rn. */
10712 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10713 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10715 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10717 if (15 == reg_src2)
10720 /* If R15 was used as Rn, the value read is the current PC + 8. */
10721 /* Pre-indexed mode doesn't reach here; illegal insn. */
10721 u_regval[0] = u_regval[0] + 8;
10723 /* Calculate target store address, Rn +/- Rm, register offset. */
10724 /* U == 1. */
10725 if (bit (arm_insn_r->arm_insn, 23))
10727 tgt_mem_addr = u_regval[0] + u_regval[1];
10729 else
10731 tgt_mem_addr = u_regval[1] - u_regval[0];
10734 switch (arm_insn_r->opcode)
10736 /* STR. */
10737 case 8:
10738 case 12:
10739 /* STR. */
10740 case 9:
10741 case 13:
10742 /* STRT. */
10743 case 1:
10744 case 5:
10745 /* STR. */
10746 case 0:
10747 case 4:
10748 record_buf_mem[0] = 4;
10749 break;
10751 /* STRB. */
10752 case 10:
10753 case 14:
10754 /* STRB. */
10755 case 11:
10756 case 15:
10757 /* STRBT. */
10758 case 3:
10759 case 7:
10760 /* STRB. */
10761 case 2:
10762 case 6:
10763 record_buf_mem[0] = 1;
10764 break;
10766 default:
10767 gdb_assert_not_reached ("no decoding pattern found");
10768 break;
10770 record_buf_mem[1] = tgt_mem_addr;
10771 arm_insn_r->mem_rec_count = 1;
10773 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10774 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10775 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10776 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10777 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10778 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10781 /* Rn is going to be changed in pre-indexed mode and
10782 post-indexed mode as well. */
10783 record_buf[0] = reg_src2;
10784 arm_insn_r->reg_rec_count = 1;
10787 else
10789 /* Store insn, scaled register offset; scaled pre-indexed. */
10790 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10791 /* Get Rm. */
10792 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10793 /* Get Rn. */
10794 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10795 /* Get shift_imm. */
10796 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10797 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10798 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10799 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10800 /* Offset_12 temporarily holds the shift type (bits 5-6). */
10801 switch (offset_12)
10803 case 0:
10804 /* LSL: offset_12 is reused as the shifted index value. */
10805 offset_12 = u_regval[0] << shift_imm;
10806 break;
10808 case 1:
10809 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10810 break;
10812 case 2:
10813 if (!shift_imm)
10815 if (bit (u_regval[0], 31))
10817 offset_12 = 0xFFFFFFFF;
10819 else
10821 offset_12 = 0;
10824 else
10826 /* This is an arithmetic shift (ASR). */
10827 offset_12 = s_word >> shift_imm;
10829 break;
10831 case 3:
10832 if (!shift_imm)
10834 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10835 &u_regval[1]);
10836 /* Get C flag value and shift it by 31. */
10837 offset_12 = (((bit (u_regval[1], 29)) << 31)
10838 | (u_regval[0]) >> 1);
10840 else
10842 offset_12 = (u_regval[0] >> shift_imm)
10843 | (u_regval[0] <<
10844 (sizeof (uint32_t) * 8 - shift_imm));
10846 break;
10848 default:
10849 gdb_assert_not_reached ("no decoding pattern found");
10850 break;
10853 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10854 /* bit U set. */
10855 if (bit (arm_insn_r->arm_insn, 23))
10857 tgt_mem_addr = u_regval[1] + offset_12;
10859 else
10861 tgt_mem_addr = u_regval[1] - offset_12;
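/* For example, STR r0, [r1, r2, LSL #2] with r2 == 3 gives
offset_12 == 12, so the recorded store address is the value of r1
plus 12 (U bit set). */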
10864 switch (arm_insn_r->opcode)
10866 /* STR. */
10867 case 8:
10868 case 12:
10869 /* STR. */
10870 case 9:
10871 case 13:
10872 /* STRT. */
10873 case 1:
10874 case 5:
10875 /* STR. */
10876 case 0:
10877 case 4:
10878 record_buf_mem[0] = 4;
10879 break;
10881 /* STRB. */
10882 case 10:
10883 case 14:
10884 /* STRB. */
10885 case 11:
10886 case 15:
10887 /* STRBT. */
10888 case 3:
10889 case 7:
10890 /* STRB. */
10891 case 2:
10892 case 6:
10893 record_buf_mem[0] = 1;
10894 break;
10896 default:
10897 gdb_assert_not_reached ("no decoding pattern found");
10898 break;
10900 record_buf_mem[1] = tgt_mem_addr;
10901 arm_insn_r->mem_rec_count = 1;
10903 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10904 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10905 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10906 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10907 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10908 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10911 /* Rn is going to be changed in register scaled pre-indexed
10912 mode, and in scaled post-indexed mode. */
10913 record_buf[0] = reg_src2;
10914 arm_insn_r->reg_rec_count = 1;
10919 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10920 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10921 return 0;
10924 /* Handle ARM mode instructions with opcode 100. */
10926 static int
10927 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10929 struct regcache *reg_cache = arm_insn_r->regcache;
10930 uint32_t register_count = 0, register_bits;
10931 uint32_t reg_base, addr_mode;
10932 uint32_t record_buf[24], record_buf_mem[48];
10933 uint32_t wback;
10934 ULONGEST u_regval;
10936 /* Fetch the list of registers. */
10937 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10938 arm_insn_r->reg_rec_count = 0;
10940 /* Fetch the base register that contains the address we are loading
10941 data from or storing data to. */
10942 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10944 /* Calculate wback. */
10945 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10947 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10949 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10951 /* Find out which registers are going to be loaded from memory. */
10952 while (register_bits)
10954 if (register_bits & 0x00000001)
10955 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10956 register_bits = register_bits >> 1;
10957 register_count++;
10961 /* If wback is true, also save the base register, which is going to be
10962 written to. */
10963 if (wback)
10964 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10966 /* Save the CPSR register. */
10967 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10969 else
10971 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10973 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10975 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10977 /* Find out how many registers are going to be stored to memory. */
10978 while (register_bits)
10980 if (register_bits & 0x00000001)
10981 register_count++;
10982 register_bits = register_bits >> 1;
10985 switch (addr_mode)
10987 /* STMDA (STMED): Decrement after. */
10988 case 0:
10989 record_buf_mem[1] = (uint32_t) u_regval
10990 - register_count * INT_REGISTER_SIZE + 4;
10991 break;
10992 /* STM (STMIA, STMEA): Increment after. */
10993 case 1:
10994 record_buf_mem[1] = (uint32_t) u_regval;
10995 break;
10996 /* STMDB (STMFD): Decrement before. */
10997 case 2:
10998 record_buf_mem[1] = (uint32_t) u_regval
10999 - register_count * INT_REGISTER_SIZE;
11000 break;
11001 /* STMIB (STMFA): Increment before. */
11002 case 3:
11003 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11004 break;
11005 default:
11006 gdb_assert_not_reached ("no decoding pattern found");
11007 break;
11010 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11011 arm_insn_r->mem_rec_count = 1;
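/* For example, STMDB sp!, {r0-r3} with SP == 0x1000 is recorded as one
16-byte block starting at 0xff0 (four registers, decrement before);
SP itself is recorded below because of the write-back. */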
11013 /* If wback is true, also save the base register, which is going to be
11014 written to. */
11015 if (wback)
11016 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11019 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11020 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11021 return 0;
11024 /* Handling opcode 101 insns. */
11026 static int
11027 arm_record_b_bl (insn_decode_record *arm_insn_r)
11029 uint32_t record_buf[8];
11031 /* Handle B, BL, BLX(1) insns. */
11032 /* B simply branches so we do nothing here. */
11033 /* Note: BLX(1) doesn't fall here but instead it falls into
11034 extension space. */
11035 if (bit (arm_insn_r->arm_insn, 24))
11037 record_buf[0] = ARM_LR_REGNUM;
11038 arm_insn_r->reg_rec_count = 1;
11041 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11043 return 0;
11046 static int
11047 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11049 printf_unfiltered (_("Process record does not support instruction "
11050 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11051 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11053 return -1;
11056 /* Record handler for vector data transfer instructions. */
11058 static int
11059 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11061 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11062 uint32_t record_buf[4];
11064 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11065 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11066 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11067 bit_l = bit (arm_insn_r->arm_insn, 20);
11068 bit_c = bit (arm_insn_r->arm_insn, 8);
11070 /* Handle VMOV instruction. */
11071 if (bit_l && bit_c)
11073 record_buf[0] = reg_t;
11074 arm_insn_r->reg_rec_count = 1;
11076 else if (bit_l && !bit_c)
11078 /* Handle VMOV instruction. */
11079 if (bits_a == 0x00)
11081 record_buf[0] = reg_t;
11082 arm_insn_r->reg_rec_count = 1;
11084 /* Handle VMRS instruction. */
11085 else if (bits_a == 0x07)
11087 if (reg_t == 15)
11088 reg_t = ARM_PS_REGNUM;
11090 record_buf[0] = reg_t;
11091 arm_insn_r->reg_rec_count = 1;
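/* E.g., VMRS APSR_nzcv, FPSCR encodes Rt == 15 and copies the FPSCR
flags into the CPSR, hence ARM_PS_REGNUM is recorded instead of r15. */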
11094 else if (!bit_l && !bit_c)
11096 /* Handle VMOV instruction. */
11097 if (bits_a == 0x00)
11099 record_buf[0] = ARM_D0_REGNUM + reg_v;
11101 arm_insn_r->reg_rec_count = 1;
11103 /* Handle VMSR instruction. */
11104 else if (bits_a == 0x07)
11106 record_buf[0] = ARM_FPSCR_REGNUM;
11107 arm_insn_r->reg_rec_count = 1;
11110 else if (!bit_l && bit_c)
11112 /* Handle VMOV instruction. */
11113 if (!(bits_a & 0x04))
11115 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11116 + ARM_D0_REGNUM;
11117 arm_insn_r->reg_rec_count = 1;
11119 /* Handle VDUP instruction. */
11120 else
11122 if (bit (arm_insn_r->arm_insn, 21))
11124 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11125 record_buf[0] = reg_v + ARM_D0_REGNUM;
11126 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11127 arm_insn_r->reg_rec_count = 2;
11129 else
11131 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11132 record_buf[0] = reg_v + ARM_D0_REGNUM;
11133 arm_insn_r->reg_rec_count = 1;
11138 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11139 return 0;
11142 /* Record handler for extension register load/store instructions. */
11144 static int
11145 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11147 uint32_t opcode, single_reg;
11148 uint8_t op_vldm_vstm;
11149 uint32_t record_buf[8], record_buf_mem[128];
11150 ULONGEST u_regval = 0;
11152 struct regcache *reg_cache = arm_insn_r->regcache;
11154 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11155 single_reg = !bit (arm_insn_r->arm_insn, 8);
11156 op_vldm_vstm = opcode & 0x1b;
11158 /* Handle VMOV instructions. */
11159 if ((opcode & 0x1e) == 0x04)
11161 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11163 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11164 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11165 arm_insn_r->reg_rec_count = 2;
11167 else
11169 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11170 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11172 if (single_reg)
11174 /* The first S register number m is REG_M:M (M is bit 5),
11175 the corresponding D register number is REG_M:M / 2, which
11176 is REG_M. */
11177 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11178 /* The second S register number is REG_M:M + 1, the
11179 corresponding D register number is (REG_M:M + 1) / 2.
11180 IOW, if bit M is 1, the first and second S registers
11181 are mapped to different D registers, otherwise, they are
11182 in the same D register. */
11183 if (bit_m)
11185 record_buf[arm_insn_r->reg_rec_count++]
11186 = ARM_D0_REGNUM + reg_m + 1;
11189 else
11191 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11192 arm_insn_r->reg_rec_count = 1;
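/* For example, VMOV r0, r1, s1, s2 (REG_M == 0, M == 1) spans D0 and D1
and records both, whereas VMOV r0, r1, d5 records just D5. */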
11196 /* Handle VSTM and VPUSH instructions. */
11197 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11198 || op_vldm_vstm == 0x12)
11200 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11201 uint32_t memory_index = 0;
11203 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11204 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11205 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11206 imm_off32 = imm_off8 << 2;
11207 memory_count = imm_off8;
11209 if (bit (arm_insn_r->arm_insn, 23))
11210 start_address = u_regval;
11211 else
11212 start_address = u_regval - imm_off32;
11214 if (bit (arm_insn_r->arm_insn, 21))
11216 record_buf[0] = reg_rn;
11217 arm_insn_r->reg_rec_count = 1;
11220 while (memory_count > 0)
11222 if (single_reg)
11224 record_buf_mem[memory_index] = 4;
11225 record_buf_mem[memory_index + 1] = start_address;
11226 start_address = start_address + 4;
11227 memory_index = memory_index + 2;
11229 else
11231 record_buf_mem[memory_index] = 4;
11232 record_buf_mem[memory_index + 1] = start_address;
11233 record_buf_mem[memory_index + 2] = 4;
11234 record_buf_mem[memory_index + 3] = start_address + 4;
11235 start_address = start_address + 8;
11236 memory_index = memory_index + 4;
11238 memory_count--;
11240 arm_insn_r->mem_rec_count = (memory_index >> 1);
11242 /* Handle VLDM instructions. */
11243 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11244 || op_vldm_vstm == 0x13)
11246 uint32_t reg_count, reg_vd;
11247 uint32_t reg_index = 0;
11248 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11250 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11251 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11253 /* REG_VD is the first D register number. If the instruction
11254 loads memory to S registers (SINGLE_REG is TRUE), the register
11255 number is (REG_VD << 1 | bit D), so the corresponding D
11256 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11257 if (!single_reg)
11258 reg_vd = reg_vd | (bit_d << 4);
11260 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11261 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11263 /* If the instruction loads memory to D register, REG_COUNT should
11264 be divided by 2, according to the ARM Architecture Reference
11265 Manual. If the instruction loads memory to S register, divide by
11266 2 as well because two S registers are mapped to one D register. */
11267 reg_count = reg_count / 2;
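/* E.g., VLDM r0, {d8-d11} encodes imm8 == 8 (two words per D register),
so reg_count becomes 4 and D8-D11 are recorded below. */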
11268 if (single_reg && bit_d)
11270 /* Increase the register count if S register list starts from
11271 an odd number (bit d is one). */
11272 reg_count++;
11275 while (reg_count > 0)
11277 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11278 reg_count--;
11280 arm_insn_r->reg_rec_count = reg_index;
11282 /* VSTR Vector store register. */
11283 else if ((opcode & 0x13) == 0x10)
11285 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11286 uint32_t memory_index = 0;
11288 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11289 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11290 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11291 imm_off32 = imm_off8 << 2;
11293 if (bit (arm_insn_r->arm_insn, 23))
11294 start_address = u_regval + imm_off32;
11295 else
11296 start_address = u_regval - imm_off32;
11298 if (single_reg)
11300 record_buf_mem[memory_index] = 4;
11301 record_buf_mem[memory_index + 1] = start_address;
11302 arm_insn_r->mem_rec_count = 1;
11304 else
11306 record_buf_mem[memory_index] = 4;
11307 record_buf_mem[memory_index + 1] = start_address;
11308 record_buf_mem[memory_index + 2] = 4;
11309 record_buf_mem[memory_index + 3] = start_address + 4;
11310 arm_insn_r->mem_rec_count = 2;
11313 /* VLDR Vector load register. */
11314 else if ((opcode & 0x13) == 0x11)
11316 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11318 if (!single_reg)
11320 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11321 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11323 else
11325 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11326 /* Record register D rather than pseudo register S. */
11327 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11329 arm_insn_r->reg_rec_count = 1;
11332 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11333 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11334 return 0;
11337 /* Record handler for arm/thumb mode VFP data processing instructions. */
11339 static int
11340 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11342 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11343 uint32_t record_buf[4];
11344 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11345 enum insn_types curr_insn_type = INSN_INV;
11347 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11348 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11349 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11350 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11351 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11352 bit_d = bit (arm_insn_r->arm_insn, 22);
11353 /* Mask off the "D" bit. */
11354 opc1 = opc1 & ~0x04;
11356 /* Handle VMLA, VMLS. */
11357 if (opc1 == 0x00)
11359 if (bit (arm_insn_r->arm_insn, 10))
11361 if (bit (arm_insn_r->arm_insn, 6))
11362 curr_insn_type = INSN_T0;
11363 else
11364 curr_insn_type = INSN_T1;
11366 else
11368 if (dp_op_sz)
11369 curr_insn_type = INSN_T1;
11370 else
11371 curr_insn_type = INSN_T2;
11374 /* Handle VNMLA, VNMLS, VNMUL. */
11375 else if (opc1 == 0x01)
11377 if (dp_op_sz)
11378 curr_insn_type = INSN_T1;
11379 else
11380 curr_insn_type = INSN_T2;
11382 /* Handle VMUL. */
11383 else if (opc1 == 0x02 && !(opc3 & 0x01))
11385 if (bit (arm_insn_r->arm_insn, 10))
11387 if (bit (arm_insn_r->arm_insn, 6))
11388 curr_insn_type = INSN_T0;
11389 else
11390 curr_insn_type = INSN_T1;
11392 else
11394 if (dp_op_sz)
11395 curr_insn_type = INSN_T1;
11396 else
11397 curr_insn_type = INSN_T2;
11400 /* Handle VADD, VSUB. */
11401 else if (opc1 == 0x03)
11403 if (!bit (arm_insn_r->arm_insn, 9))
11405 if (bit (arm_insn_r->arm_insn, 6))
11406 curr_insn_type = INSN_T0;
11407 else
11408 curr_insn_type = INSN_T1;
11410 else
11412 if (dp_op_sz)
11413 curr_insn_type = INSN_T1;
11414 else
11415 curr_insn_type = INSN_T2;
11418 /* Handle VDIV. */
11419 else if (opc1 == 0x08)
11421 if (dp_op_sz)
11422 curr_insn_type = INSN_T1;
11423 else
11424 curr_insn_type = INSN_T2;
11426 /* Handle all other vfp data processing instructions. */
11427 else if (opc1 == 0x0b)
11429 /* Handle VMOV. */
11430 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11432 if (bit (arm_insn_r->arm_insn, 4))
11434 if (bit (arm_insn_r->arm_insn, 6))
11435 curr_insn_type = INSN_T0;
11436 else
11437 curr_insn_type = INSN_T1;
11439 else
11441 if (dp_op_sz)
11442 curr_insn_type = INSN_T1;
11443 else
11444 curr_insn_type = INSN_T2;
11447 /* Handle VNEG and VABS. */
11448 else if ((opc2 == 0x01 && opc3 == 0x01)
11449 || (opc2 == 0x00 && opc3 == 0x03))
11451 if (!bit (arm_insn_r->arm_insn, 11))
11453 if (bit (arm_insn_r->arm_insn, 6))
11454 curr_insn_type = INSN_T0;
11455 else
11456 curr_insn_type = INSN_T1;
11458 else
11460 if (dp_op_sz)
11461 curr_insn_type = INSN_T1;
11462 else
11463 curr_insn_type = INSN_T2;
11466 /* Handle VSQRT. */
11467 else if (opc2 == 0x01 && opc3 == 0x03)
11469 if (dp_op_sz)
11470 curr_insn_type = INSN_T1;
11471 else
11472 curr_insn_type = INSN_T2;
11474 /* Handle VCVT. */
11475 else if (opc2 == 0x07 && opc3 == 0x03)
11477 if (!dp_op_sz)
11478 curr_insn_type = INSN_T1;
11479 else
11480 curr_insn_type = INSN_T2;
11482 else if (opc3 & 0x01)
11484 /* Handle VCVT. */
11485 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11487 if (!bit (arm_insn_r->arm_insn, 18))
11488 curr_insn_type = INSN_T2;
11489 else
11491 if (dp_op_sz)
11492 curr_insn_type = INSN_T1;
11493 else
11494 curr_insn_type = INSN_T2;
11497 /* Handle VCVT. */
11498 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11500 if (dp_op_sz)
11501 curr_insn_type = INSN_T1;
11502 else
11503 curr_insn_type = INSN_T2;
11505 /* Handle VCVTB, VCVTT. */
11506 else if ((opc2 & 0x0e) == 0x02)
11507 curr_insn_type = INSN_T2;
11508 /* Handle VCMP, VCMPE. */
11509 else if ((opc2 & 0x0e) == 0x04)
11510 curr_insn_type = INSN_T3;
11514 switch (curr_insn_type)
11516 case INSN_T0:
11517 reg_vd = reg_vd | (bit_d << 4);
11518 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11519 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11520 arm_insn_r->reg_rec_count = 2;
11521 break;
11523 case INSN_T1:
11524 reg_vd = reg_vd | (bit_d << 4);
11525 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11526 arm_insn_r->reg_rec_count = 1;
11527 break;
11529 case INSN_T2:
11530 reg_vd = (reg_vd << 1) | bit_d;
11531 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11532 arm_insn_r->reg_rec_count = 1;
11533 break;
11535 case INSN_T3:
11536 record_buf[0] = ARM_FPSCR_REGNUM;
11537 arm_insn_r->reg_rec_count = 1;
11538 break;
11540 default:
11541 gdb_assert_not_reached ("no decoding pattern found");
11542 break;
11545 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11546 return 0;
11549 /* Handling opcode 110 insns. */
11551 static int
11552 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11554 uint32_t op1, op1_ebit, coproc;
11556 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11557 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11558 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11560 if ((coproc & 0x0e) == 0x0a)
11562 /* Handle extension register ld/st instructions. */
11563 if (!(op1 & 0x20))
11564 return arm_record_exreg_ld_st_insn (arm_insn_r);
11566 /* 64-bit transfers between arm core and extension registers. */
11567 if ((op1 & 0x3e) == 0x04)
11568 return arm_record_exreg_ld_st_insn (arm_insn_r);
11570 else
11572 /* Handle coprocessor ld/st instructions. */
11573 if (!(op1 & 0x3a))
11575 /* Store. */
11576 if (!op1_ebit)
11577 return arm_record_unsupported_insn (arm_insn_r);
11578 else
11579 /* Load. */
11580 return arm_record_unsupported_insn (arm_insn_r);
11583 /* Move to coprocessor from two arm core registers. */
11584 if (op1 == 0x4)
11585 return arm_record_unsupported_insn (arm_insn_r);
11587 /* Move to two arm core registers from coprocessor. */
11588 if (op1 == 0x5)
11590 uint32_t reg_t[2];
11592 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11593 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11594 arm_insn_r->reg_rec_count = 2;
11596 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11597 return 0;
11600 return arm_record_unsupported_insn (arm_insn_r);
11603 /* Handling opcode 111 insns. */
11605 static int
11606 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11608 uint32_t op, op1_ebit, coproc, bits_24_25;
11609 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11610 struct regcache *reg_cache = arm_insn_r->regcache;
11612 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11613 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11614 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11615 op = bit (arm_insn_r->arm_insn, 4);
11616 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11618 /* Handle arm SWI/SVC system call instructions. */
11619 if (bits_24_25 == 0x3)
11621 if (tdep->arm_syscall_record != NULL)
11623 ULONGEST svc_operand, svc_number;
11625 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11627 if (svc_operand) /* OABI. */
11628 svc_number = svc_operand - 0x900000;
11629 else /* EABI. */
11630 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11632 return tdep->arm_syscall_record (reg_cache, svc_number);
11634 else
11636 printf_unfiltered (_("no syscall record support\n"));
11637 return -1;
11640 else if (bits_24_25 == 0x02)
11642 if (op)
11644 if ((coproc & 0x0e) == 0x0a)
11646 /* 8, 16, and 32-bit transfer */
11647 return arm_record_vdata_transfer_insn (arm_insn_r);
11649 else
11651 if (op1_ebit)
11653 /* MRC, MRC2 */
11654 uint32_t record_buf[1];
11656 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11657 if (record_buf[0] == 15)
11658 record_buf[0] = ARM_PS_REGNUM;
11660 arm_insn_r->reg_rec_count = 1;
11661 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11662 record_buf);
11663 return 0;
11665 else
11667 /* MCR, MCR2 */
11668 return -1;
11672 else
11674 if ((coproc & 0x0e) == 0x0a)
11676 /* VFP data-processing instructions. */
11677 return arm_record_vfp_data_proc_insn (arm_insn_r);
11679 else
11681 /* CDP, CDP2 */
11682 return -1;
11686 else
11688 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11690 if (op1 == 5)
11692 if ((coproc & 0x0e) != 0x0a)
11694 /* MRRC, MRRC2 */
11695 return -1;
11698 else if (op1 == 4 || op1 == 5)
11700 if ((coproc & 0x0e) == 0x0a)
11702 /* 64-bit transfers between ARM core and extension registers */
11703 return -1;
11705 else if (op1 == 4)
11707 /* MCRR, MCRR2 */
11708 return -1;
11711 else if (op1 == 0 || op1 == 1)
11713 /* UNDEFINED */
11714 return -1;
11716 else
11718 if ((coproc & 0x0e) == 0x0a)
11720 /* Extension register load/store */
11722 else
11724 /* STC, STC2, LDC, LDC2 */
11726 return -1;
11730 return -1;
11733 /* Handling opcode 000 insns. */
11735 static int
11736 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11738 uint32_t record_buf[8];
11739 uint32_t reg_src1 = 0;
11741 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11743 record_buf[0] = ARM_PS_REGNUM;
11744 record_buf[1] = reg_src1;
11745 thumb_insn_r->reg_rec_count = 2;
11747 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11749 return 0;
11753 /* Handling opcode 001 insns. */
11755 static int
11756 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11758 uint32_t record_buf[8];
11759 uint32_t reg_src1 = 0;
11761 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11763 record_buf[0] = ARM_PS_REGNUM;
11764 record_buf[1] = reg_src1;
11765 thumb_insn_r->reg_rec_count = 2;
11767 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11769 return 0;
11772 /* Handling opcode 010 insns. */
11774 static int
11775 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11777 struct regcache *reg_cache = thumb_insn_r->regcache;
11778 uint32_t record_buf[8], record_buf_mem[8];
11780 uint32_t reg_src1 = 0, reg_src2 = 0;
11781 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11783 ULONGEST u_regval[2] = {0};
11785 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11787 if (bit (thumb_insn_r->arm_insn, 12))
11789 /* Handle load/store register offset. */
11790 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11792 if (in_inclusive_range (opB, 4U, 7U))
11794 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11795 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11796 record_buf[0] = reg_src1;
11797 thumb_insn_r->reg_rec_count = 1;
11799 else if (in_inclusive_range (opB, 0U, 2U))
11801 /* STR(2), STRB(2), STRH(2). */
11802 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11803 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11804 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11805 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11806 if (0 == opB)
11807 record_buf_mem[0] = 4; /* STR (2). */
11808 else if (2 == opB)
11809 record_buf_mem[0] = 1; /* STRB (2). */
11810 else if (1 == opB)
11811 record_buf_mem[0] = 2; /* STRH (2). */
11812 record_buf_mem[1] = u_regval[0] + u_regval[1];
11813 thumb_insn_r->mem_rec_count = 1;
11816 else if (bit (thumb_insn_r->arm_insn, 11))
11818 /* Handle load from literal pool. */
11819 /* LDR(3). */
11820 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11821 record_buf[0] = reg_src1;
11822 thumb_insn_r->reg_rec_count = 1;
11824 else if (opcode1)
11826 /* Special data instructions and branch and exchange */
11827 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11828 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11829 if ((3 == opcode2) && (!opcode3))
11831 /* Branch with exchange. */
11832 record_buf[0] = ARM_PS_REGNUM;
11833 thumb_insn_r->reg_rec_count = 1;
11835 else
11837 /* Format 8; special data processing insns. */
11838 record_buf[0] = ARM_PS_REGNUM;
11839 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11840 | bits (thumb_insn_r->arm_insn, 0, 2));
11841 thumb_insn_r->reg_rec_count = 2;
11844 else
11846 /* Format 5; data processing insns. */
11847 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11848 if (bit (thumb_insn_r->arm_insn, 7))
11850 reg_src1 = reg_src1 + 8;
11852 record_buf[0] = ARM_PS_REGNUM;
11853 record_buf[1] = reg_src1;
11854 thumb_insn_r->reg_rec_count = 2;
11857 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11858 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11859 record_buf_mem);
11861 return 0;
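/* In these record handlers record_buf collects the numbers of the registers
   an instruction may change, and record_buf_mem collects (length, address)
   pairs for the memory it may write.  REG_ALLOC and MEM_ALLOC copy
   reg_rec_count and mem_rec_count entries of each into the
   insn_decode_record, from where arm_process_record hands them to the
   record-full machinery.  */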
11864 /* Handling opcode 001 insns. */
11866 static int
11867 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11869 struct regcache *reg_cache = thumb_insn_r->regcache;
11870 uint32_t record_buf[8], record_buf_mem[8];
11872 uint32_t reg_src1 = 0;
11873 uint32_t opcode = 0, immed_5 = 0;
11875 ULONGEST u_regval = 0;
11877 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11879 if (opcode)
11881 /* LDR(1). */
11882 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11883 record_buf[0] = reg_src1;
11884 thumb_insn_r->reg_rec_count = 1;
11886 else
11888 /* STR(1). */
11889 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11890 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11891 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11892 record_buf_mem[0] = 4;
11893 record_buf_mem[1] = u_regval + (immed_5 * 4);
11894 thumb_insn_r->mem_rec_count = 1;
11897 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11898 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11899 record_buf_mem);
11901 return 0;
11904 /* Handling opcode 100 insns. */
11906 static int
11907 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11909 struct regcache *reg_cache = thumb_insn_r->regcache;
11910 uint32_t record_buf[8], record_buf_mem[8];
11912 uint32_t reg_src1 = 0;
11913 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11915 ULONGEST u_regval = 0;
11917 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11919 if (3 == opcode)
11921 /* LDR(4). */
11922 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11923 record_buf[0] = reg_src1;
11924 thumb_insn_r->reg_rec_count = 1;
11926 else if (1 == opcode)
11928 /* LDRH(1). */
11929 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11930 record_buf[0] = reg_src1;
11931 thumb_insn_r->reg_rec_count = 1;
11933 else if (2 == opcode)
11935 /* STR(3). */
11936 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11937 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11938 record_buf_mem[0] = 4;
11939 record_buf_mem[1] = u_regval + (immed_8 * 4);
11940 thumb_insn_r->mem_rec_count = 1;
11942 else if (0 == opcode)
11944 /* STRH(1). */
11945 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11946 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11947 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11948 record_buf_mem[0] = 2;
11949 record_buf_mem[1] = u_regval + (immed_5 * 2);
11950 thumb_insn_r->mem_rec_count = 1;
11953 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11954 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11955 record_buf_mem);
11957 return 0;
11960 /* Handling opcode 101 insns. */
11962 static int
11963 thumb_record_misc (insn_decode_record *thumb_insn_r)
11965 struct regcache *reg_cache = thumb_insn_r->regcache;
11967 uint32_t opcode = 0;
11968 uint32_t register_bits = 0, register_count = 0;
11969 uint32_t index = 0, start_address = 0;
11970 uint32_t record_buf[24], record_buf_mem[48];
11971 uint32_t reg_src1;
11973 ULONGEST u_regval = 0;
11975 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11977 if (opcode == 0 || opcode == 1)
11979 /* ADR and ADD (SP plus immediate) */
11981 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11982 record_buf[0] = reg_src1;
11983 thumb_insn_r->reg_rec_count = 1;
11985 else
11987 /* Miscellaneous 16-bit instructions */
11988 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
11990 switch (opcode2)
11992 case 6:
11993 /* SETEND and CPS */
11994 break;
11995 case 0:
11996 /* ADD/SUB (SP plus immediate) */
11997 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11998 record_buf[0] = ARM_SP_REGNUM;
11999 thumb_insn_r->reg_rec_count = 1;
12000 break;
12001 case 1: /* fall through */
12002 case 3: /* fall through */
12003 case 9: /* fall through */
12004 case 11:
12005 /* CBNZ, CBZ */
12006 break;
12007 case 2:
12008 /* SXTH, SXTB, UXTH, UXTB */
12009 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12010 thumb_insn_r->reg_rec_count = 1;
12011 break;
12012 case 4: /* fall through */
12013 case 5:
12014 /* PUSH. */
12015 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12016 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12017 while (register_bits)
12019 if (register_bits & 0x00000001)
12020 register_count++;
12021 register_bits = register_bits >> 1;
12023 start_address = u_regval
12024 - (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12025 thumb_insn_r->mem_rec_count = register_count;
12026 while (register_count)
12028 record_buf_mem[(register_count * 2) - 1] = start_address;
12029 record_buf_mem[(register_count * 2) - 2] = 4;
12030 start_address = start_address + 4;
12031 register_count--;
12033 record_buf[0] = ARM_SP_REGNUM;
12034 thumb_insn_r->reg_rec_count = 1;
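/* For example, "push {r0, r2}" (register_bits == 0x05, bit 8 clear)
   records two 4-byte slots at SP - 8 and SP - 4 and marks SP itself as a
   changed register.  */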
12035 break;
12036 case 10:
12037 /* REV, REV16, REVSH */
12038 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12039 thumb_insn_r->reg_rec_count = 1;
12040 break;
12041 case 12: /* fall through */
12042 case 13:
12043 /* POP. */
12044 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12045 while (register_bits)
12047 if (register_bits & 0x00000001)
12048 record_buf[index++] = register_count;
12049 register_bits = register_bits >> 1;
12050 register_count++;
12052 record_buf[index++] = ARM_PS_REGNUM;
12053 record_buf[index++] = ARM_SP_REGNUM;
12054 thumb_insn_r->reg_rec_count = index;
12055 break;
12056 case 0xe:
12057 /* BKPT insn. */
12058 /* Handle the enhanced software breakpoint instruction, BKPT. */
12059 /* The CPSR is changed so that execution resumes in ARM state, in Abort
12060 mode, with normal interrupts disabled. */
12061 /* The PC is then set according to the high-vector configuration. */
12062 /* If the user hits this breakpoint and then runs in reverse, we need to
12063 go back to the previous CPSR and program counter. */
12064 record_buf[0] = ARM_PS_REGNUM;
12065 record_buf[1] = ARM_LR_REGNUM;
12066 thumb_insn_r->reg_rec_count = 2;
12067 /* We need to save SPSR value, which is not yet done. */
12068 printf_unfiltered (_("Process record does not support instruction "
12069 "0x%0x at address %s.\n"),
12070 thumb_insn_r->arm_insn,
12071 paddress (thumb_insn_r->gdbarch,
12072 thumb_insn_r->this_addr));
12073 return -1;
12075 case 0xf:
12076 /* If-Then, and hints */
12077 break;
12078 default:
12079 return -1;
12083 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12084 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12085 record_buf_mem);
12087 return 0;
12090 /* Handling opcode 110 insns. */
12092 static int
12093 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12095 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12096 struct regcache *reg_cache = thumb_insn_r->regcache;
12098 uint32_t ret = 0; /* Return value: -1: record failure; 0: success. */
12099 uint32_t reg_src1 = 0;
12100 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12101 uint32_t index = 0, start_address = 0;
12102 uint32_t record_buf[24], record_buf_mem[48];
12104 ULONGEST u_regval = 0;
12106 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12107 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12109 if (1 == opcode2)
12112 /* LDMIA. */
12113 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12114 /* Get Rn. */
12115 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12116 while (register_bits)
12118 if (register_bits & 0x00000001)
12119 record_buf[index++] = register_count;
12120 register_bits = register_bits >> 1;
12121 register_count++;
12123 record_buf[index++] = reg_src1;
12124 thumb_insn_r->reg_rec_count = index;
12126 else if (0 == opcode2)
12128 /* Handle STMIA. */
12129 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12130 /* Get Rn. */
12131 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12132 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12133 while (register_bits)
12135 if (register_bits & 0x00000001)
12136 register_count++;
12137 register_bits = register_bits >> 1;
12139 start_address = u_regval;
12140 thumb_insn_r->mem_rec_count = register_count;
12141 while (register_count)
12143 record_buf_mem[(register_count * 2) - 1] = start_address;
12144 record_buf_mem[(register_count * 2) - 2] = 4;
12145 start_address = start_address + 4;
12146 register_count--;
12149 else if (0x1F == opcode1)
12151 /* Handle arm syscall insn. */
12152 if (tdep->arm_syscall_record != NULL)
12154 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12155 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12157 else
12159 printf_unfiltered (_("no syscall record support\n"));
12160 return -1;
12164 /* B (1), the conditional branch, is automatically taken care of in
12165 process_record, as the PC is saved there. */
12167 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12168 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12169 record_buf_mem);
12171 return ret;
12174 /* Handling opcode 111 insns. */
12176 static int
12177 thumb_record_branch (insn_decode_record *thumb_insn_r)
12179 uint32_t record_buf[8];
12180 uint32_t bits_h = 0;
12182 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12184 if (2 == bits_h || 3 == bits_h)
12186 /* BL */
12187 record_buf[0] = ARM_LR_REGNUM;
12188 thumb_insn_r->reg_rec_count = 1;
12190 else if (1 == bits_h)
12192 /* BLX(1). */
12193 record_buf[0] = ARM_PS_REGNUM;
12194 record_buf[1] = ARM_LR_REGNUM;
12195 thumb_insn_r->reg_rec_count = 2;
12198 /* B(2) is automatically taken care of in process_record, as the PC is
12199 saved there. */
12201 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12203 return 0;
12206 /* Handler for thumb2 load/store multiple instructions. */
12208 static int
12209 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12211 struct regcache *reg_cache = thumb2_insn_r->regcache;
12213 uint32_t reg_rn, op;
12214 uint32_t register_bits = 0, register_count = 0;
12215 uint32_t index = 0, start_address = 0;
12216 uint32_t record_buf[24], record_buf_mem[48];
12218 ULONGEST u_regval = 0;
12220 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12221 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12223 if (0 == op || 3 == op)
12225 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12227 /* Handle RFE instruction. */
12228 record_buf[0] = ARM_PS_REGNUM;
12229 thumb2_insn_r->reg_rec_count = 1;
12231 else
12233 /* Handle SRS instruction after reading banked SP. */
12234 return arm_record_unsupported_insn (thumb2_insn_r);
12237 else if (1 == op || 2 == op)
12239 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12241 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12242 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12243 while (register_bits)
12245 if (register_bits & 0x00000001)
12246 record_buf[index++] = register_count;
12248 register_count++;
12249 register_bits = register_bits >> 1;
12251 record_buf[index++] = reg_rn;
12252 record_buf[index++] = ARM_PS_REGNUM;
12253 thumb2_insn_r->reg_rec_count = index;
12255 else
12257 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12258 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12259 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12260 while (register_bits)
12262 if (register_bits & 0x00000001)
12263 register_count++;
12265 register_bits = register_bits >> 1;
12268 if (1 == op)
12270 /* Start address calculation for STM/STMIA/STMEA. */
12271 start_address = u_regval;
12273 else if (2 == op)
12275 /* Start address calculation for STMDB/STMFD. */
12276 start_address = u_regval - register_count * 4;
12279 thumb2_insn_r->mem_rec_count = register_count;
12280 while (register_count)
12282 record_buf_mem[register_count * 2 - 1] = start_address;
12283 record_buf_mem[register_count * 2 - 2] = 4;
12284 start_address = start_address + 4;
12285 register_count--;
12287 record_buf[0] = reg_rn;
12288 record_buf[1] = ARM_PS_REGNUM;
12289 thumb2_insn_r->reg_rec_count = 2;
12293 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12294 record_buf_mem);
12295 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12296 record_buf);
12297 return ARM_RECORD_SUCCESS;
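/* For example, a Thumb-2 "stmdb r1!, {r2, r3, r4}" takes the op == 2 path
   above: three 4-byte slots starting at r1 - 12 are recorded, together with
   r1 (write-back) and the status register.  */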
12300 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12301 instructions. */
12303 static int
12304 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12306 struct regcache *reg_cache = thumb2_insn_r->regcache;
12308 uint32_t reg_rd, reg_rn, offset_imm;
12309 uint32_t reg_dest1, reg_dest2;
12310 uint32_t address, offset_addr;
12311 uint32_t record_buf[8], record_buf_mem[8];
12312 uint32_t op1, op2, op3;
12314 ULONGEST u_regval[2];
12316 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12317 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12318 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12320 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12322 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12324 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12325 record_buf[0] = reg_dest1;
12326 record_buf[1] = ARM_PS_REGNUM;
12327 thumb2_insn_r->reg_rec_count = 2;
12330 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12332 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12333 record_buf[2] = reg_dest2;
12334 thumb2_insn_r->reg_rec_count = 3;
12337 else
12339 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12340 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12342 if (0 == op1 && 0 == op2)
12344 /* Handle STREX. */
12345 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12346 address = u_regval[0] + (offset_imm * 4);
12347 record_buf_mem[0] = 4;
12348 record_buf_mem[1] = address;
12349 thumb2_insn_r->mem_rec_count = 1;
12350 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12351 record_buf[0] = reg_rd;
12352 thumb2_insn_r->reg_rec_count = 1;
12354 else if (1 == op1 && 0 == op2)
12356 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12357 record_buf[0] = reg_rd;
12358 thumb2_insn_r->reg_rec_count = 1;
12359 address = u_regval[0];
12360 record_buf_mem[1] = address;
12362 if (4 == op3)
12364 /* Handle STREXB. */
12365 record_buf_mem[0] = 1;
12366 thumb2_insn_r->mem_rec_count = 1;
12368 else if (5 == op3)
12370 /* Handle STREXH. */
12371 record_buf_mem[0] = 2;
12372 thumb2_insn_r->mem_rec_count = 1;
12374 else if (7 == op3)
12376 /* Handle STREXD. */
12377 address = u_regval[0];
12378 record_buf_mem[0] = 4;
12379 record_buf_mem[2] = 4;
12380 record_buf_mem[3] = address + 4;
12381 thumb2_insn_r->mem_rec_count = 2;
12384 else
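/* Handle STRD (store register dual): record the two consecutive words
   written at the computed address, plus the base register for the
   write-back forms.  */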
12386 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12388 if (bit (thumb2_insn_r->arm_insn, 24))
12390 if (bit (thumb2_insn_r->arm_insn, 23))
12391 offset_addr = u_regval[0] + (offset_imm * 4);
12392 else
12393 offset_addr = u_regval[0] - (offset_imm * 4);
12395 address = offset_addr;
12397 else
12398 address = u_regval[0];
12400 record_buf_mem[0] = 4;
12401 record_buf_mem[1] = address;
12402 record_buf_mem[2] = 4;
12403 record_buf_mem[3] = address + 4;
12404 thumb2_insn_r->mem_rec_count = 2;
12405 record_buf[0] = reg_rn;
12406 thumb2_insn_r->reg_rec_count = 1;
12410 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12411 record_buf);
12412 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12413 record_buf_mem);
12414 return ARM_RECORD_SUCCESS;
12417 /* Handler for thumb2 data processing (shifted register and modified
12418 immediate) instructions. */
12420 static int
12421 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12423 uint32_t reg_rd, op;
12424 uint32_t record_buf[8];
12426 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12427 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12429 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12431 record_buf[0] = ARM_PS_REGNUM;
12432 thumb2_insn_r->reg_rec_count = 1;
12434 else
12436 record_buf[0] = reg_rd;
12437 record_buf[1] = ARM_PS_REGNUM;
12438 thumb2_insn_r->reg_rec_count = 2;
12441 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12442 record_buf);
12443 return ARM_RECORD_SUCCESS;
12446 /* Generic handler for thumb2 instructions which affect the destination and
12447 PS registers. */
12449 static int
12450 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12452 uint32_t reg_rd;
12453 uint32_t record_buf[8];
12455 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12457 record_buf[0] = reg_rd;
12458 record_buf[1] = ARM_PS_REGNUM;
12459 thumb2_insn_r->reg_rec_count = 2;
12461 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12462 record_buf);
12463 return ARM_RECORD_SUCCESS;
12466 /* Handler for thumb2 branch and miscellaneous control instructions. */
12468 static int
12469 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12471 uint32_t op, op1, op2;
12472 uint32_t record_buf[8];
12474 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12475 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12476 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12478 /* Handle MSR insn. */
12479 if (!(op1 & 0x2) && 0x38 == op)
12481 if (!(op2 & 0x3))
12483 /* CPSR is going to be changed. */
12484 record_buf[0] = ARM_PS_REGNUM;
12485 thumb2_insn_r->reg_rec_count = 1;
12487 else
12489 arm_record_unsupported_insn(thumb2_insn_r);
12490 return -1;
12493 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12495 /* BLX. */
12496 record_buf[0] = ARM_PS_REGNUM;
12497 record_buf[1] = ARM_LR_REGNUM;
12498 thumb2_insn_r->reg_rec_count = 2;
12501 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12502 record_buf);
12503 return ARM_RECORD_SUCCESS;
12506 /* Handler for thumb2 store single data item instructions. */
12508 static int
12509 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12511 struct regcache *reg_cache = thumb2_insn_r->regcache;
12513 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12514 uint32_t address, offset_addr;
12515 uint32_t record_buf[8], record_buf_mem[8];
12516 uint32_t op1, op2;
12518 ULONGEST u_regval[2];
12520 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12521 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12522 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12523 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12525 if (bit (thumb2_insn_r->arm_insn, 23))
12527 /* T2 encoding. */
12528 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12529 offset_addr = u_regval[0] + offset_imm;
12530 address = offset_addr;
12532 else
12534 /* T3 encoding. */
12535 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12537 /* Handle STRB (register). */
12538 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12539 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12540 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12541 offset_addr = u_regval[1] << shift_imm;
12542 address = u_regval[0] + offset_addr;
12544 else
12546 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12547 if (bit (thumb2_insn_r->arm_insn, 10))
12549 if (bit (thumb2_insn_r->arm_insn, 9))
12550 offset_addr = u_regval[0] + offset_imm;
12551 else
12552 offset_addr = u_regval[0] - offset_imm;
12554 address = offset_addr;
12556 else
12557 address = u_regval[0];
12561 switch (op1)
12563 /* Store byte instructions. */
12564 case 4:
12565 case 0:
12566 record_buf_mem[0] = 1;
12567 break;
12568 /* Store half word instructions. */
12569 case 1:
12570 case 5:
12571 record_buf_mem[0] = 2;
12572 break;
12573 /* Store word instructions. */
12574 case 2:
12575 case 6:
12576 record_buf_mem[0] = 4;
12577 break;
12579 default:
12580 gdb_assert_not_reached ("no decoding pattern found");
12581 break;
12584 record_buf_mem[1] = address;
12585 thumb2_insn_r->mem_rec_count = 1;
12586 record_buf[0] = reg_rn;
12587 thumb2_insn_r->reg_rec_count = 1;
12589 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12590 record_buf);
12591 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12592 record_buf_mem);
12593 return ARM_RECORD_SUCCESS;
12596 /* Handler for thumb2 load memory hints instructions. */
12598 static int
12599 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12601 uint32_t record_buf[8];
12602 uint32_t reg_rt, reg_rn;
12604 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12605 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12607 if (ARM_PC_REGNUM != reg_rt)
12609 record_buf[0] = reg_rt;
12610 record_buf[1] = reg_rn;
12611 record_buf[2] = ARM_PS_REGNUM;
12612 thumb2_insn_r->reg_rec_count = 3;
12614 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12615 record_buf);
12616 return ARM_RECORD_SUCCESS;
12619 return ARM_RECORD_FAILURE;
12622 /* Handler for thumb2 load word instructions. */
12624 static int
12625 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12627 uint32_t record_buf[8];
12629 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12630 record_buf[1] = ARM_PS_REGNUM;
12631 thumb2_insn_r->reg_rec_count = 2;
12633 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12634 record_buf);
12635 return ARM_RECORD_SUCCESS;
12638 /* Handler for thumb2 long multiply, long multiply accumulate, and
12639 divide instructions. */
12641 static int
12642 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12644 uint32_t opcode1 = 0, opcode2 = 0;
12645 uint32_t record_buf[8];
12647 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12648 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12650 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12652 /* Handle SMULL, UMULL, SMLAL, UMLAL and the related long
12653 multiply-accumulate instructions. */
12654 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12655 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12656 record_buf[2] = ARM_PS_REGNUM;
12657 thumb2_insn_r->reg_rec_count = 3;
12659 else if (1 == opcode1 || 3 == opcode2)
12661 /* Handle SDIV and UDIV. */
12662 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12663 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12664 record_buf[2] = ARM_PS_REGNUM;
12665 thumb2_insn_r->reg_rec_count = 3;
12667 else
12668 return ARM_RECORD_FAILURE;
12670 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12671 record_buf);
12672 return ARM_RECORD_SUCCESS;
12675 /* Record handler for thumb32 coprocessor instructions. */
12677 static int
12678 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12680 if (bit (thumb2_insn_r->arm_insn, 25))
12681 return arm_record_coproc_data_proc (thumb2_insn_r);
12682 else
12683 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12686 /* Record handler for Advanced SIMD structure load/store instructions. */
12688 static int
12689 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12691 struct regcache *reg_cache = thumb2_insn_r->regcache;
12692 uint32_t l_bit, a_bit, b_bits;
12693 uint32_t record_buf[128], record_buf_mem[128];
12694 uint32_t reg_rn, reg_vd, address, f_elem;
12695 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12696 uint8_t f_ebytes;
12698 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12699 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12700 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12701 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12702 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12703 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12704 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12705 f_elem = 8 / f_ebytes;
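/* f_ebytes is the element size in bytes, taken from the size field, and
   f_elem is the number of such elements held in one 8-byte D register.  */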
12707 if (!l_bit)
12709 ULONGEST u_regval = 0;
12710 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12711 address = u_regval;
12713 if (!a_bit)
12715 /* Handle VST1. */
12716 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12718 if (b_bits == 0x07)
12719 bf_regs = 1;
12720 else if (b_bits == 0x0a)
12721 bf_regs = 2;
12722 else if (b_bits == 0x06)
12723 bf_regs = 3;
12724 else if (b_bits == 0x02)
12725 bf_regs = 4;
12726 else
12727 bf_regs = 0;
12729 for (index_r = 0; index_r < bf_regs; index_r++)
12731 for (index_e = 0; index_e < f_elem; index_e++)
12733 record_buf_mem[index_m++] = f_ebytes;
12734 record_buf_mem[index_m++] = address;
12735 address = address + f_ebytes;
12736 thumb2_insn_r->mem_rec_count += 1;
12740 /* Handle VST2. */
12741 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12743 if (b_bits == 0x09 || b_bits == 0x08)
12744 bf_regs = 1;
12745 else if (b_bits == 0x03)
12746 bf_regs = 2;
12747 else
12748 bf_regs = 0;
12750 for (index_r = 0; index_r < bf_regs; index_r++)
12751 for (index_e = 0; index_e < f_elem; index_e++)
12753 for (loop_t = 0; loop_t < 2; loop_t++)
12755 record_buf_mem[index_m++] = f_ebytes;
12756 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12757 thumb2_insn_r->mem_rec_count += 1;
12759 address = address + (2 * f_ebytes);
12762 /* Handle VST3. */
12763 else if ((b_bits & 0x0e) == 0x04)
12765 for (index_e = 0; index_e < f_elem; index_e++)
12767 for (loop_t = 0; loop_t < 3; loop_t++)
12769 record_buf_mem[index_m++] = f_ebytes;
12770 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12771 thumb2_insn_r->mem_rec_count += 1;
12773 address = address + (3 * f_ebytes);
12776 /* Handle VST4. */
12777 else if (!(b_bits & 0x0e))
12779 for (index_e = 0; index_e < f_elem; index_e++)
12781 for (loop_t = 0; loop_t < 4; loop_t++)
12783 record_buf_mem[index_m++] = f_ebytes;
12784 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12785 thumb2_insn_r->mem_rec_count += 1;
12787 address = address + (4 * f_ebytes);
12791 else
12793 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12795 if (bft_size == 0x00)
12796 f_ebytes = 1;
12797 else if (bft_size == 0x01)
12798 f_ebytes = 2;
12799 else if (bft_size == 0x02)
12800 f_ebytes = 4;
12801 else
12802 f_ebytes = 0;
12804 /* Handle VST1. */
12805 if (!(b_bits & 0x0b) || b_bits == 0x08)
12806 thumb2_insn_r->mem_rec_count = 1;
12807 /* Handle VST2. */
12808 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12809 thumb2_insn_r->mem_rec_count = 2;
12810 /* Handle VST3. */
12811 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12812 thumb2_insn_r->mem_rec_count = 3;
12813 /* Handle VST4. */
12814 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12815 thumb2_insn_r->mem_rec_count = 4;
12817 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12819 record_buf_mem[index_m * 2] = f_ebytes;
12820 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12824 else
12826 if (!a_bit)
12828 /* Handle VLD1. */
12829 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12830 thumb2_insn_r->reg_rec_count = 1;
12831 /* Handle VLD2. */
12832 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12833 thumb2_insn_r->reg_rec_count = 2;
12834 /* Handle VLD3. */
12835 else if ((b_bits & 0x0e) == 0x04)
12836 thumb2_insn_r->reg_rec_count = 3;
12837 /* Handle VLD4. */
12838 else if (!(b_bits & 0x0e))
12839 thumb2_insn_r->reg_rec_count = 4;
12841 else
12843 /* Handle VLD1. */
12844 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12845 thumb2_insn_r->reg_rec_count = 1;
12846 /* Handle VLD2. */
12847 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12848 thumb2_insn_r->reg_rec_count = 2;
12849 /* Handle VLD3. */
12850 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12851 thumb2_insn_r->reg_rec_count = 3;
12852 /* Handle VLD4. */
12853 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12854 thumb2_insn_r->reg_rec_count = 4;
12856 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12857 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12861 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12863 record_buf[index_r] = reg_rn;
12864 thumb2_insn_r->reg_rec_count += 1;
12867 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12868 record_buf);
12869 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12870 record_buf_mem);
12871 return 0;
12874 /* Decodes thumb2 instruction type and invokes its record handler. */
12876 static unsigned int
12877 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12879 uint32_t op, op1, op2;
12881 op = bit (thumb2_insn_r->arm_insn, 15);
12882 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12883 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12885 if (op1 == 0x01)
12887 if (!(op2 & 0x64 ))
12889 /* Load/store multiple instruction. */
12890 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12892 else if ((op2 & 0x64) == 0x4)
12894 /* Load/store (dual/exclusive) and table branch instruction. */
12895 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12897 else if ((op2 & 0x60) == 0x20)
12899 /* Data-processing (shifted register). */
12900 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12902 else if (op2 & 0x40)
12904 /* Co-processor instructions. */
12905 return thumb2_record_coproc_insn (thumb2_insn_r);
12908 else if (op1 == 0x02)
12910 if (op)
12912 /* Branches and miscellaneous control instructions. */
12913 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12915 else if (op2 & 0x20)
12917 /* Data-processing (plain binary immediate) instruction. */
12918 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12920 else
12922 /* Data-processing (modified immediate). */
12923 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12926 else if (op1 == 0x03)
12928 if (!(op2 & 0x71 ))
12930 /* Store single data item. */
12931 return thumb2_record_str_single_data (thumb2_insn_r);
12933 else if (!((op2 & 0x71) ^ 0x10))
12935 /* Advanced SIMD or structure load/store instructions. */
12936 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12938 else if (!((op2 & 0x67) ^ 0x01))
12940 /* Load byte, memory hints instruction. */
12941 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12943 else if (!((op2 & 0x67) ^ 0x03))
12945 /* Load halfword, memory hints instruction. */
12946 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12948 else if (!((op2 & 0x67) ^ 0x05))
12950 /* Load word instruction. */
12951 return thumb2_record_ld_word (thumb2_insn_r);
12953 else if (!((op2 & 0x70) ^ 0x20))
12955 /* Data-processing (register) instruction. */
12956 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12958 else if (!((op2 & 0x78) ^ 0x30))
12960 /* Multiply, multiply accumulate, abs diff instruction. */
12961 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12963 else if (!((op2 & 0x78) ^ 0x38))
12965 /* Long multiply, long multiply accumulate, and divide. */
12966 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12968 else if (op2 & 0x40)
12970 /* Co-processor instructions. */
12971 return thumb2_record_coproc_insn (thumb2_insn_r);
12975 return -1;
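/* For instance, the 32-bit Thumb MRC encoding 0xee1d7f70 exercised by the
   self test below has op1 == 0x01 and op2 == 0x61, so the (op2 & 0x40) test
   routes it to thumb2_record_coproc_insn, which ends up in the MRC path and
   records only the destination core register, r7.  */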
12978 namespace {
12979 /* Abstract memory reader. */
12981 class abstract_memory_reader
12983 public:
12984 /* Read LEN bytes of target memory at address MEMADDR, placing the
12985 results in GDB's memory at BUF. Return true on success. */
12987 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
12990 /* Instruction reader from real target. */
12992 class instruction_reader : public abstract_memory_reader
12994 public:
12995 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
12997 if (target_read_memory (memaddr, buf, len))
12998 return false;
12999 else
13000 return true;
13004 } // namespace
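/* A minimal illustrative sketch (nothing else in this file uses it) of
   another abstract_memory_reader: one backed by a caller-supplied byte
   buffer instead of the live target.  The self-test reader further below
   follows the same pattern for canned Thumb instructions.  */

class buffer_memory_reader : public abstract_memory_reader
{
public:
  buffer_memory_reader (CORE_ADDR base, const gdb_byte *bytes, size_t size)
    : m_base (base), m_bytes (bytes), m_size (size)
  {}

  bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
  {
    /* Serve the read from the backing buffer, refusing anything that falls
       outside of it.  */
    if (memaddr < m_base || memaddr - m_base + len > m_size)
      return false;
    memcpy (buf, m_bytes + (memaddr - m_base), len);
    return true;
  }

private:
  CORE_ADDR m_base;
  const gdb_byte *m_bytes;
  size_t m_size;
};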
13006 /* Extract an arm/thumb/thumb2 insn depending on its size, and return 0 on
13007 success or a positive value on failure. */
13009 static int
13010 extract_arm_insn (abstract_memory_reader& reader,
13011 insn_decode_record *insn_record, uint32_t insn_size)
13013 gdb_byte buf[insn_size];
13015 memset (&buf[0], 0, insn_size);
13017 if (!reader.read (insn_record->this_addr, buf, insn_size))
13018 return 1;
13019 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13020 insn_size,
13021 gdbarch_byte_order_for_code (insn_record->gdbarch));
13022 return 0;
13025 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13027 /* Decode an arm/thumb insn depending on its condition code and opcode, and
13028 dispatch it to the matching record handler. */
13030 static int
13031 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13032 record_type_t record_type, uint32_t insn_size)
13035 /* Counting from bit 0, bits 25, 26 and 27 select the type of arm
13036 instruction. */
13037 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13039 arm_record_data_proc_misc_ld_str, /* 000. */
13040 arm_record_data_proc_imm, /* 001. */
13041 arm_record_ld_st_imm_offset, /* 010. */
13042 arm_record_ld_st_reg_offset, /* 011. */
13043 arm_record_ld_st_multiple, /* 100. */
13044 arm_record_b_bl, /* 101. */
13045 arm_record_asimd_vfp_coproc, /* 110. */
13046 arm_record_coproc_data_proc /* 111. */
13049 /* Counting from bit 0, bits 13, 14 and 15 select the type of thumb
13050 instruction. */
13051 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13053 thumb_record_shift_add_sub, /* 000. */
13054 thumb_record_add_sub_cmp_mov, /* 001. */
13055 thumb_record_ld_st_reg_offset, /* 010. */
13056 thumb_record_ld_st_imm_offset, /* 011. */
13057 thumb_record_ld_st_stack, /* 100. */
13058 thumb_record_misc, /* 101. */
13059 thumb_record_ldm_stm_swi, /* 110. */
13060 thumb_record_branch /* 111. */
13063 uint32_t ret = 0; /* Return value: negative: failure; 0: success. */
13064 uint32_t insn_id = 0;
13066 if (extract_arm_insn (reader, arm_record, insn_size))
13068 if (record_debug)
13070 printf_unfiltered (_("Process record: error reading memory at "
13071 "addr %s len = %d.\n"),
13072 paddress (arm_record->gdbarch,
13073 arm_record->this_addr), insn_size);
13075 return -1;
13077 else if (ARM_RECORD == record_type)
13079 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13080 insn_id = bits (arm_record->arm_insn, 25, 27);
13082 if (arm_record->cond == 0xf)
13083 ret = arm_record_extension_space (arm_record);
13084 else
13086 /* This insn did not fall into the extension space, so decode it by
13087 its opcode bits. */
13088 ret = arm_handle_insn[insn_id] (arm_record);
13090 if (ret != ARM_RECORD_SUCCESS)
13092 arm_record_unsupported_insn (arm_record);
13093 ret = -1;
13096 else if (THUMB_RECORD == record_type)
13098 /* As thumb does not have condition codes, we set cond to -1. */
13099 arm_record->cond = -1;
13100 insn_id = bits (arm_record->arm_insn, 13, 15);
13101 ret = thumb_handle_insn[insn_id] (arm_record);
13102 if (ret != ARM_RECORD_SUCCESS)
13104 arm_record_unsupported_insn (arm_record);
13105 ret = -1;
13108 else if (THUMB2_RECORD == record_type)
13110 /* As thumb does not have condition codes, we set cond to -1. */
13111 arm_record->cond = -1;
13113 /* Swap the first half of the 32-bit thumb instruction with the second half. */
13114 arm_record->arm_insn
13115 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13117 ret = thumb2_record_decode_insn_handler (arm_record);
13119 if (ret != ARM_RECORD_SUCCESS)
13121 arm_record_unsupported_insn (arm_record);
13122 ret = -1;
13125 else
13127 /* Throw assertion. */
13128 gdb_assert_not_reached ("not a valid instruction, could not decode");
13131 return ret;
13134 #if GDB_SELF_TEST
13135 namespace selftests {
13137 /* Provide both 16-bit and 32-bit thumb instructions. */
13139 class instruction_reader_thumb : public abstract_memory_reader
13141 public:
13142 template<size_t SIZE>
13143 instruction_reader_thumb (enum bfd_endian endian,
13144 const uint16_t (&insns)[SIZE])
13145 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13148 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13150 SELF_CHECK (len == 4 || len == 2);
13151 SELF_CHECK (memaddr % 2 == 0);
13152 SELF_CHECK ((memaddr / 2) < m_insns_size);
13154 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13155 if (len == 4)
13157 store_unsigned_integer (&buf[2], 2, m_endian,
13158 m_insns[memaddr / 2 + 1]);
13160 return true;
13163 private:
13164 enum bfd_endian m_endian;
13165 const uint16_t *m_insns;
13166 size_t m_insns_size;
13169 static void
13170 arm_record_test (void)
13172 struct gdbarch_info info;
13173 gdbarch_info_init (&info);
13174 info.bfd_arch_info = bfd_scan_arch ("arm");
13176 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13178 SELF_CHECK (gdbarch != NULL);
13180 /* 16-bit Thumb instructions. */
13182 insn_decode_record arm_record;
13184 memset (&arm_record, 0, sizeof (insn_decode_record));
13185 arm_record.gdbarch = gdbarch;
13187 static const uint16_t insns[] = {
13188 /* db b2 uxtb r3, r3 */
13189 0xb2db,
13190 /* cd 58 ldr r5, [r1, r3] */
13191 0x58cd,
13194 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13195 instruction_reader_thumb reader (endian, insns);
13196 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13197 THUMB_INSN_SIZE_BYTES);
13199 SELF_CHECK (ret == 0);
13200 SELF_CHECK (arm_record.mem_rec_count == 0);
13201 SELF_CHECK (arm_record.reg_rec_count == 1);
13202 SELF_CHECK (arm_record.arm_regs[0] == 3);
13204 arm_record.this_addr += 2;
13205 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13206 THUMB_INSN_SIZE_BYTES);
13208 SELF_CHECK (ret == 0);
13209 SELF_CHECK (arm_record.mem_rec_count == 0);
13210 SELF_CHECK (arm_record.reg_rec_count == 1);
13211 SELF_CHECK (arm_record.arm_regs[0] == 5);
13214 /* 32-bit Thumb-2 instructions. */
13216 insn_decode_record arm_record;
13218 memset (&arm_record, 0, sizeof (insn_decode_record));
13219 arm_record.gdbarch = gdbarch;
13221 static const uint16_t insns[] = {
13222 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13223 0xee1d, 0x7f70,
13226 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13227 instruction_reader_thumb reader (endian, insns);
13228 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13229 THUMB2_INSN_SIZE_BYTES);
13231 SELF_CHECK (ret == 0);
13232 SELF_CHECK (arm_record.mem_rec_count == 0);
13233 SELF_CHECK (arm_record.reg_rec_count == 1);
13234 SELF_CHECK (arm_record.arm_regs[0] == 7);
13237 } // namespace selftests
13238 #endif /* GDB_SELF_TEST */
13240 /* Cleans up local record registers and memory allocations. */
13242 static void
13243 deallocate_reg_mem (insn_decode_record *record)
13245 xfree (record->arm_regs);
13246 xfree (record->arm_mems);
13250 /* Parse the current instruction and record the values of the registers and
13251 memory that it will change into the record_arch_list.
13252 Return -1 if something goes wrong. */
13254 int
13255 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13256 CORE_ADDR insn_addr)
13259 uint32_t no_of_rec = 0;
13260 uint32_t ret = 0; /* Return value: -1: record failure; 0: success. */
13261 ULONGEST t_bit = 0, insn_id = 0;
13263 ULONGEST u_regval = 0;
13265 insn_decode_record arm_record;
13267 memset (&arm_record, 0, sizeof (insn_decode_record));
13268 arm_record.regcache = regcache;
13269 arm_record.this_addr = insn_addr;
13270 arm_record.gdbarch = gdbarch;
13273 if (record_debug > 1)
13275 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13276 "addr = %s\n",
13277 paddress (gdbarch, arm_record.this_addr));
13280 instruction_reader reader;
13281 if (extract_arm_insn (reader, &arm_record, 2))
13283 if (record_debug)
13285 printf_unfiltered (_("Process record: error reading memory at "
13286 "addr %s len = %d.\n"),
13287 paddress (arm_record.gdbarch,
13288 arm_record.this_addr), 2);
13290 return -1;
13293 /* Check whether the insn is a thumb or an arm one. */
13295 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13296 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13299 if (!(u_regval & t_bit))
13301 /* We are decoding arm insn. */
13302 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13304 else
13306 insn_id = bits (arm_record.arm_insn, 11, 15);
13307 /* Is it a thumb2 insn? */
13308 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13310 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13311 THUMB2_INSN_SIZE_BYTES);
13313 else
13315 /* We are decoding thumb insn. */
13316 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13317 THUMB_INSN_SIZE_BYTES);
13321 if (0 == ret)
13323 /* Record registers. */
13324 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13325 if (arm_record.arm_regs)
13327 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13329 if (record_full_arch_list_add_reg
13330 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13331 ret = -1;
13334 /* Record memories. */
13335 if (arm_record.arm_mems)
13337 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13339 if (record_full_arch_list_add_mem
13340 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13341 arm_record.arm_mems[no_of_rec].len))
13342 ret = -1;
13346 if (record_full_arch_list_add_end ())
13347 ret = -1;
13351 deallocate_reg_mem (&arm_record);
13353 return ret;
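/* arm_process_record is installed as the gdbarch process_record method
   elsewhere in this file; the record-full target then calls it once per
   executed instruction, so that the register and memory values noted above
   can be restored when the user executes in reverse.  */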