1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2018 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
33 #include "target-float.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
50 #include "arch/arm-get-next-pcs.h"
52 #include "gdb/sim-arm.h"
55 #include "coff/internal.h"
61 #include "record-full.h"
64 #include "features/arm/arm-with-m.c"
65 #include "features/arm/arm-with-m-fpa-layout.c"
66 #include "features/arm/arm-with-m-vfp-d16.c"
67 #include "features/arm/arm-with-iwmmxt.c"
68 #include "features/arm/arm-with-vfpv2.c"
69 #include "features/arm/arm-with-vfpv3.c"
70 #include "features/arm/arm-with-neon.c"
78 /* Macros for setting and testing a bit in a minimal symbol that marks
79 it as Thumb function. The MSB of the minimal symbol's "info" field
80 is used for this purpose.
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
85 #define MSYMBOL_SET_SPECIAL(msym) \
86 MSYMBOL_TARGET_FLAG_1 (msym) = 1
88 #define MSYMBOL_IS_SPECIAL(msym) \
89 MSYMBOL_TARGET_FLAG_1 (msym)
91 /* Per-objfile data used for mapping symbols. */
92 static const struct objfile_data
*arm_objfile_data_key
;
94 struct arm_mapping_symbol
99 typedef struct arm_mapping_symbol arm_mapping_symbol_s
;
100 DEF_VEC_O(arm_mapping_symbol_s
);
102 struct arm_per_objfile
104 VEC(arm_mapping_symbol_s
) **section_maps
;
107 /* The list of available "set arm ..." and "show arm ..." commands. */
108 static struct cmd_list_element
*setarmcmdlist
= NULL
;
109 static struct cmd_list_element
*showarmcmdlist
= NULL
;
111 /* The type of floating-point to use. Keep this in sync with enum
112 arm_float_model, and the help string in _initialize_arm_tdep. */
113 static const char *const fp_model_strings
[] =
123 /* A variable that can be configured by the user. */
124 static enum arm_float_model arm_fp_model
= ARM_FLOAT_AUTO
;
125 static const char *current_fp_model
= "auto";
127 /* The ABI to use. Keep this in sync with arm_abi_kind. */
128 static const char *const arm_abi_strings
[] =
136 /* A variable that can be configured by the user. */
137 static enum arm_abi_kind arm_abi_global
= ARM_ABI_AUTO
;
138 static const char *arm_abi_string
= "auto";
140 /* The execution mode to assume. */
141 static const char *const arm_mode_strings
[] =
149 static const char *arm_fallback_mode_string
= "auto";
150 static const char *arm_force_mode_string
= "auto";
152 /* The standard register names, and all the valid aliases for them. Note
153 that `fp', `sp' and `pc' are not added in this alias list, because they
154 have been added as builtin user registers in
155 std-regs.c:_initialize_frame_reg. */
160 } arm_register_aliases
[] = {
161 /* Basic register numbers. */
178 /* Synonyms (argument and variable registers). */
191 /* Other platform-specific names for r9. */
197 /* Names used by GCC (not listed in the ARM EABI). */
199 /* A special name from the older ATPCS. */
203 static const char *const arm_register_names
[] =
204 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
205 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
206 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
207 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
208 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
209 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
210 "fps", "cpsr" }; /* 24 25 */
212 /* Holds the current set of options to be passed to the disassembler. */
213 static char *arm_disassembler_options
;
215 /* Valid register name styles. */
216 static const char **valid_disassembly_styles
;
218 /* Disassembly style to use. Default to "std" register names. */
219 static const char *disassembly_style
;
221 /* This is used to keep the bfd arch_info in sync with the disassembly
223 static void set_disassembly_style_sfunc (const char *, int,
224 struct cmd_list_element
*);
225 static void show_disassembly_style_sfunc (struct ui_file
*, int,
226 struct cmd_list_element
*,
229 static enum register_status
arm_neon_quad_read (struct gdbarch
*gdbarch
,
230 readable_regcache
*regcache
,
231 int regnum
, gdb_byte
*buf
);
232 static void arm_neon_quad_write (struct gdbarch
*gdbarch
,
233 struct regcache
*regcache
,
234 int regnum
, const gdb_byte
*buf
);
237 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs
*self
);
240 /* get_next_pcs operations. */
241 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops
= {
242 arm_get_next_pcs_read_memory_unsigned_integer
,
243 arm_get_next_pcs_syscall_next_pc
,
244 arm_get_next_pcs_addr_bits_remove
,
245 arm_get_next_pcs_is_thumb
,
249 struct arm_prologue_cache
251 /* The stack pointer at the time this frame was created; i.e. the
252 caller's stack pointer when this function was called. It is used
253 to identify this frame. */
256 /* The frame base for this frame is just prev_sp - frame size.
257 FRAMESIZE is the distance from the frame pointer to the
258 initial stack pointer. */
262 /* The register used to hold the frame pointer for this frame. */
265 /* Saved register offsets. */
266 struct trad_frame_saved_reg
*saved_regs
;
269 static CORE_ADDR
arm_analyze_prologue (struct gdbarch
*gdbarch
,
270 CORE_ADDR prologue_start
,
271 CORE_ADDR prologue_end
,
272 struct arm_prologue_cache
*cache
);
274 /* Architecture version for displaced stepping. This effects the behaviour of
275 certain instructions, and really should not be hard-wired. */
277 #define DISPLACED_STEPPING_ARCH_VERSION 5
279 /* Set to true if the 32-bit mode is in use. */
283 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
286 arm_psr_thumb_bit (struct gdbarch
*gdbarch
)
288 if (gdbarch_tdep (gdbarch
)->is_m
)
294 /* Determine if the processor is currently executing in Thumb mode. */
297 arm_is_thumb (struct regcache
*regcache
)
300 ULONGEST t_bit
= arm_psr_thumb_bit (regcache
->arch ());
302 cpsr
= regcache_raw_get_unsigned (regcache
, ARM_PS_REGNUM
);
304 return (cpsr
& t_bit
) != 0;
307 /* Determine if FRAME is executing in Thumb mode. */
310 arm_frame_is_thumb (struct frame_info
*frame
)
313 ULONGEST t_bit
= arm_psr_thumb_bit (get_frame_arch (frame
));
315 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
316 directly (from a signal frame or dummy frame) or by interpreting
317 the saved LR (from a prologue or DWARF frame). So consult it and
318 trust the unwinders. */
319 cpsr
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
321 return (cpsr
& t_bit
) != 0;
324 /* Callback for VEC_lower_bound. */
327 arm_compare_mapping_symbols (const struct arm_mapping_symbol
*lhs
,
328 const struct arm_mapping_symbol
*rhs
)
330 return lhs
->value
< rhs
->value
;
333 /* Search for the mapping symbol covering MEMADDR. If one is found,
334 return its type. Otherwise, return 0. If START is non-NULL,
335 set *START to the location of the mapping symbol. */
338 arm_find_mapping_symbol (CORE_ADDR memaddr
, CORE_ADDR
*start
)
340 struct obj_section
*sec
;
342 /* If there are mapping symbols, consult them. */
343 sec
= find_pc_section (memaddr
);
346 struct arm_per_objfile
*data
;
347 VEC(arm_mapping_symbol_s
) *map
;
348 struct arm_mapping_symbol map_key
= { memaddr
- obj_section_addr (sec
),
352 data
= (struct arm_per_objfile
*) objfile_data (sec
->objfile
,
353 arm_objfile_data_key
);
356 map
= data
->section_maps
[sec
->the_bfd_section
->index
];
357 if (!VEC_empty (arm_mapping_symbol_s
, map
))
359 struct arm_mapping_symbol
*map_sym
;
361 idx
= VEC_lower_bound (arm_mapping_symbol_s
, map
, &map_key
,
362 arm_compare_mapping_symbols
);
364 /* VEC_lower_bound finds the earliest ordered insertion
365 point. If the following symbol starts at this exact
366 address, we use that; otherwise, the preceding
367 mapping symbol covers this address. */
368 if (idx
< VEC_length (arm_mapping_symbol_s
, map
))
370 map_sym
= VEC_index (arm_mapping_symbol_s
, map
, idx
);
371 if (map_sym
->value
== map_key
.value
)
374 *start
= map_sym
->value
+ obj_section_addr (sec
);
375 return map_sym
->type
;
381 map_sym
= VEC_index (arm_mapping_symbol_s
, map
, idx
- 1);
383 *start
= map_sym
->value
+ obj_section_addr (sec
);
384 return map_sym
->type
;
393 /* Determine if the program counter specified in MEMADDR is in a Thumb
394 function. This function should be called for addresses unrelated to
395 any executing frame; otherwise, prefer arm_frame_is_thumb. */
398 arm_pc_is_thumb (struct gdbarch
*gdbarch
, CORE_ADDR memaddr
)
400 struct bound_minimal_symbol sym
;
402 arm_displaced_step_closure
*dsc
403 = ((arm_displaced_step_closure
* )
404 get_displaced_step_closure_by_addr (memaddr
));
406 /* If checking the mode of displaced instruction in copy area, the mode
407 should be determined by instruction on the original address. */
411 fprintf_unfiltered (gdb_stdlog
,
412 "displaced: check mode of %.8lx instead of %.8lx\n",
413 (unsigned long) dsc
->insn_addr
,
414 (unsigned long) memaddr
);
415 memaddr
= dsc
->insn_addr
;
418 /* If bit 0 of the address is set, assume this is a Thumb address. */
419 if (IS_THUMB_ADDR (memaddr
))
422 /* If the user wants to override the symbol table, let him. */
423 if (strcmp (arm_force_mode_string
, "arm") == 0)
425 if (strcmp (arm_force_mode_string
, "thumb") == 0)
428 /* ARM v6-M and v7-M are always in Thumb mode. */
429 if (gdbarch_tdep (gdbarch
)->is_m
)
432 /* If there are mapping symbols, consult them. */
433 type
= arm_find_mapping_symbol (memaddr
, NULL
);
437 /* Thumb functions have a "special" bit set in minimal symbols. */
438 sym
= lookup_minimal_symbol_by_pc (memaddr
);
440 return (MSYMBOL_IS_SPECIAL (sym
.minsym
));
442 /* If the user wants to override the fallback mode, let them. */
443 if (strcmp (arm_fallback_mode_string
, "arm") == 0)
445 if (strcmp (arm_fallback_mode_string
, "thumb") == 0)
448 /* If we couldn't find any symbol, but we're talking to a running
449 target, then trust the current value of $cpsr. This lets
450 "display/i $pc" always show the correct mode (though if there is
451 a symbol table we will not reach here, so it still may not be
452 displayed in the mode it will be executed). */
453 if (target_has_registers
)
454 return arm_frame_is_thumb (get_current_frame ());
456 /* Otherwise we're out of luck; we assume ARM. */
460 /* Determine if the address specified equals any of these magic return
461 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
464 From ARMv6-M Reference Manual B1.5.8
465 Table B1-5 Exception return behavior
467 EXC_RETURN Return To Return Stack
468 0xFFFFFFF1 Handler mode Main
469 0xFFFFFFF9 Thread mode Main
470 0xFFFFFFFD Thread mode Process
472 From ARMv7-M Reference Manual B1.5.8
473 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
475 EXC_RETURN Return To Return Stack
476 0xFFFFFFF1 Handler mode Main
477 0xFFFFFFF9 Thread mode Main
478 0xFFFFFFFD Thread mode Process
480 Table B1-9 EXC_RETURN definition of exception return behavior, with
483 EXC_RETURN Return To Return Stack Frame Type
484 0xFFFFFFE1 Handler mode Main Extended
485 0xFFFFFFE9 Thread mode Main Extended
486 0xFFFFFFED Thread mode Process Extended
487 0xFFFFFFF1 Handler mode Main Basic
488 0xFFFFFFF9 Thread mode Main Basic
489 0xFFFFFFFD Thread mode Process Basic
491 For more details see "B1.5.8 Exception return behavior"
492 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
495 arm_m_addr_is_magic (CORE_ADDR addr
)
499 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
500 the exception return behavior. */
507 /* Address is magic. */
511 /* Address is not magic. */
516 /* Remove useless bits from addresses in a running program. */
518 arm_addr_bits_remove (struct gdbarch
*gdbarch
, CORE_ADDR val
)
520 /* On M-profile devices, do not strip the low bit from EXC_RETURN
521 (the magic exception return address). */
522 if (gdbarch_tdep (gdbarch
)->is_m
523 && arm_m_addr_is_magic (val
))
527 return UNMAKE_THUMB_ADDR (val
);
529 return (val
& 0x03fffffc);
532 /* Return 1 if PC is the start of a compiler helper function which
533 can be safely ignored during prologue skipping. IS_THUMB is true
534 if the function is known to be a Thumb function due to the way it
537 skip_prologue_function (struct gdbarch
*gdbarch
, CORE_ADDR pc
, int is_thumb
)
539 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
540 struct bound_minimal_symbol msym
;
542 msym
= lookup_minimal_symbol_by_pc (pc
);
543 if (msym
.minsym
!= NULL
544 && BMSYMBOL_VALUE_ADDRESS (msym
) == pc
545 && MSYMBOL_LINKAGE_NAME (msym
.minsym
) != NULL
)
547 const char *name
= MSYMBOL_LINKAGE_NAME (msym
.minsym
);
549 /* The GNU linker's Thumb call stub to foo is named
551 if (strstr (name
, "_from_thumb") != NULL
)
554 /* On soft-float targets, __truncdfsf2 is called to convert promoted
555 arguments to their argument types in non-prototyped
557 if (startswith (name
, "__truncdfsf2"))
559 if (startswith (name
, "__aeabi_d2f"))
562 /* Internal functions related to thread-local storage. */
563 if (startswith (name
, "__tls_get_addr"))
565 if (startswith (name
, "__aeabi_read_tp"))
570 /* If we run against a stripped glibc, we may be unable to identify
571 special functions by name. Check for one important case,
572 __aeabi_read_tp, by comparing the *code* against the default
573 implementation (this is hand-written ARM assembler in glibc). */
576 && read_code_unsigned_integer (pc
, 4, byte_order_for_code
)
577 == 0xe3e00a0f /* mov r0, #0xffff0fff */
578 && read_code_unsigned_integer (pc
+ 4, 4, byte_order_for_code
)
579 == 0xe240f01f) /* sub pc, r0, #31 */
586 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
587 the first 16-bit of instruction, and INSN2 is the second 16-bit of
589 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
590 ((bits ((insn1), 0, 3) << 12) \
591 | (bits ((insn1), 10, 10) << 11) \
592 | (bits ((insn2), 12, 14) << 8) \
593 | bits ((insn2), 0, 7))
595 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
596 the 32-bit instruction. */
597 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
598 ((bits ((insn), 16, 19) << 12) \
599 | bits ((insn), 0, 11))
601 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
604 thumb_expand_immediate (unsigned int imm
)
606 unsigned int count
= imm
>> 7;
614 return (imm
& 0xff) | ((imm
& 0xff) << 16);
616 return ((imm
& 0xff) << 8) | ((imm
& 0xff) << 24);
618 return (imm
& 0xff) | ((imm
& 0xff) << 8)
619 | ((imm
& 0xff) << 16) | ((imm
& 0xff) << 24);
622 return (0x80 | (imm
& 0x7f)) << (32 - count
);
625 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
626 epilogue, 0 otherwise. */
629 thumb_instruction_restores_sp (unsigned short insn
)
631 return (insn
== 0x46bd /* mov sp, r7 */
632 || (insn
& 0xff80) == 0xb000 /* add sp, imm */
633 || (insn
& 0xfe00) == 0xbc00); /* pop <registers> */
636 /* Analyze a Thumb prologue, looking for a recognizable stack frame
637 and frame pointer. Scan until we encounter a store that could
638 clobber the stack frame unexpectedly, or an unknown instruction.
639 Return the last address which is definitely safe to skip for an
640 initial breakpoint. */
643 thumb_analyze_prologue (struct gdbarch
*gdbarch
,
644 CORE_ADDR start
, CORE_ADDR limit
,
645 struct arm_prologue_cache
*cache
)
647 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
648 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
652 CORE_ADDR unrecognized_pc
= 0;
654 for (i
= 0; i
< 16; i
++)
655 regs
[i
] = pv_register (i
, 0);
656 pv_area
stack (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
658 while (start
< limit
)
662 insn
= read_code_unsigned_integer (start
, 2, byte_order_for_code
);
664 if ((insn
& 0xfe00) == 0xb400) /* push { rlist } */
669 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
672 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
673 whether to save LR (R14). */
674 mask
= (insn
& 0xff) | ((insn
& 0x100) << 6);
676 /* Calculate offsets of saved R0-R7 and LR. */
677 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
678 if (mask
& (1 << regno
))
680 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
682 stack
.store (regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
685 else if ((insn
& 0xff80) == 0xb080) /* sub sp, #imm */
687 offset
= (insn
& 0x7f) << 2; /* get scaled offset */
688 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
691 else if (thumb_instruction_restores_sp (insn
))
693 /* Don't scan past the epilogue. */
696 else if ((insn
& 0xf800) == 0xa800) /* add Rd, sp, #imm */
697 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[ARM_SP_REGNUM
],
699 else if ((insn
& 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
700 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
701 regs
[bits (insn
, 0, 2)] = pv_add_constant (regs
[bits (insn
, 3, 5)],
703 else if ((insn
& 0xf800) == 0x3000 /* add Rd, #imm */
704 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
705 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[bits (insn
, 8, 10)],
707 else if ((insn
& 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
708 && pv_is_register (regs
[bits (insn
, 6, 8)], ARM_SP_REGNUM
)
709 && pv_is_constant (regs
[bits (insn
, 3, 5)]))
710 regs
[bits (insn
, 0, 2)] = pv_add (regs
[bits (insn
, 3, 5)],
711 regs
[bits (insn
, 6, 8)]);
712 else if ((insn
& 0xff00) == 0x4400 /* add Rd, Rm */
713 && pv_is_constant (regs
[bits (insn
, 3, 6)]))
715 int rd
= (bit (insn
, 7) << 3) + bits (insn
, 0, 2);
716 int rm
= bits (insn
, 3, 6);
717 regs
[rd
] = pv_add (regs
[rd
], regs
[rm
]);
719 else if ((insn
& 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
721 int dst_reg
= (insn
& 0x7) + ((insn
& 0x80) >> 4);
722 int src_reg
= (insn
& 0x78) >> 3;
723 regs
[dst_reg
] = regs
[src_reg
];
725 else if ((insn
& 0xf800) == 0x9000) /* str rd, [sp, #off] */
727 /* Handle stores to the stack. Normally pushes are used,
728 but with GCC -mtpcs-frame, there may be other stores
729 in the prologue to create the frame. */
730 int regno
= (insn
>> 8) & 0x7;
733 offset
= (insn
& 0xff) << 2;
734 addr
= pv_add_constant (regs
[ARM_SP_REGNUM
], offset
);
736 if (stack
.store_would_trash (addr
))
739 stack
.store (addr
, 4, regs
[regno
]);
741 else if ((insn
& 0xf800) == 0x6000) /* str rd, [rn, #off] */
743 int rd
= bits (insn
, 0, 2);
744 int rn
= bits (insn
, 3, 5);
747 offset
= bits (insn
, 6, 10) << 2;
748 addr
= pv_add_constant (regs
[rn
], offset
);
750 if (stack
.store_would_trash (addr
))
753 stack
.store (addr
, 4, regs
[rd
]);
755 else if (((insn
& 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
756 || (insn
& 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
757 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
758 /* Ignore stores of argument registers to the stack. */
760 else if ((insn
& 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
761 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
762 /* Ignore block loads from the stack, potentially copying
763 parameters from memory. */
765 else if ((insn
& 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
766 || ((insn
& 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
767 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
)))
768 /* Similarly ignore single loads from the stack. */
770 else if ((insn
& 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
771 || (insn
& 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
772 /* Skip register copies, i.e. saves to another register
773 instead of the stack. */
775 else if ((insn
& 0xf800) == 0x2000) /* movs Rd, #imm */
776 /* Recognize constant loads; even with small stacks these are necessary
778 regs
[bits (insn
, 8, 10)] = pv_constant (bits (insn
, 0, 7));
779 else if ((insn
& 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
781 /* Constant pool loads, for the same reason. */
782 unsigned int constant
;
785 loc
= start
+ 4 + bits (insn
, 0, 7) * 4;
786 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
787 regs
[bits (insn
, 8, 10)] = pv_constant (constant
);
789 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instructions. */
791 unsigned short inst2
;
793 inst2
= read_code_unsigned_integer (start
+ 2, 2,
794 byte_order_for_code
);
796 if ((insn
& 0xf800) == 0xf000 && (inst2
& 0xe800) == 0xe800)
798 /* BL, BLX. Allow some special function calls when
799 skipping the prologue; GCC generates these before
800 storing arguments to the stack. */
802 int j1
, j2
, imm1
, imm2
;
804 imm1
= sbits (insn
, 0, 10);
805 imm2
= bits (inst2
, 0, 10);
806 j1
= bit (inst2
, 13);
807 j2
= bit (inst2
, 11);
809 offset
= ((imm1
<< 12) + (imm2
<< 1));
810 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
812 nextpc
= start
+ 4 + offset
;
813 /* For BLX make sure to clear the low bits. */
814 if (bit (inst2
, 12) == 0)
815 nextpc
= nextpc
& 0xfffffffc;
817 if (!skip_prologue_function (gdbarch
, nextpc
,
818 bit (inst2
, 12) != 0))
822 else if ((insn
& 0xffd0) == 0xe900 /* stmdb Rn{!},
824 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
826 pv_t addr
= regs
[bits (insn
, 0, 3)];
829 if (stack
.store_would_trash (addr
))
832 /* Calculate offsets of saved registers. */
833 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
834 if (inst2
& (1 << regno
))
836 addr
= pv_add_constant (addr
, -4);
837 stack
.store (addr
, 4, regs
[regno
]);
841 regs
[bits (insn
, 0, 3)] = addr
;
844 else if ((insn
& 0xff50) == 0xe940 /* strd Rt, Rt2,
846 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
848 int regno1
= bits (inst2
, 12, 15);
849 int regno2
= bits (inst2
, 8, 11);
850 pv_t addr
= regs
[bits (insn
, 0, 3)];
852 offset
= inst2
& 0xff;
854 addr
= pv_add_constant (addr
, offset
);
856 addr
= pv_add_constant (addr
, -offset
);
858 if (stack
.store_would_trash (addr
))
861 stack
.store (addr
, 4, regs
[regno1
]);
862 stack
.store (pv_add_constant (addr
, 4),
866 regs
[bits (insn
, 0, 3)] = addr
;
869 else if ((insn
& 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
870 && (inst2
& 0x0c00) == 0x0c00
871 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
873 int regno
= bits (inst2
, 12, 15);
874 pv_t addr
= regs
[bits (insn
, 0, 3)];
876 offset
= inst2
& 0xff;
878 addr
= pv_add_constant (addr
, offset
);
880 addr
= pv_add_constant (addr
, -offset
);
882 if (stack
.store_would_trash (addr
))
885 stack
.store (addr
, 4, regs
[regno
]);
888 regs
[bits (insn
, 0, 3)] = addr
;
891 else if ((insn
& 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
892 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
894 int regno
= bits (inst2
, 12, 15);
897 offset
= inst2
& 0xfff;
898 addr
= pv_add_constant (regs
[bits (insn
, 0, 3)], offset
);
900 if (stack
.store_would_trash (addr
))
903 stack
.store (addr
, 4, regs
[regno
]);
906 else if ((insn
& 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
907 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
908 /* Ignore stores of argument registers to the stack. */
911 else if ((insn
& 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
912 && (inst2
& 0x0d00) == 0x0c00
913 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
914 /* Ignore stores of argument registers to the stack. */
917 else if ((insn
& 0xffd0) == 0xe890 /* ldmia Rn[!],
919 && (inst2
& 0x8000) == 0x0000
920 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
921 /* Ignore block loads from the stack, potentially copying
922 parameters from memory. */
925 else if ((insn
& 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
927 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
928 /* Similarly ignore dual loads from the stack. */
931 else if ((insn
& 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
932 && (inst2
& 0x0d00) == 0x0c00
933 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
934 /* Similarly ignore single loads from the stack. */
937 else if ((insn
& 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
938 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
939 /* Similarly ignore single loads from the stack. */
942 else if ((insn
& 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
943 && (inst2
& 0x8000) == 0x0000)
945 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
946 | (bits (inst2
, 12, 14) << 8)
947 | bits (inst2
, 0, 7));
949 regs
[bits (inst2
, 8, 11)]
950 = pv_add_constant (regs
[bits (insn
, 0, 3)],
951 thumb_expand_immediate (imm
));
954 else if ((insn
& 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
955 && (inst2
& 0x8000) == 0x0000)
957 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
958 | (bits (inst2
, 12, 14) << 8)
959 | bits (inst2
, 0, 7));
961 regs
[bits (inst2
, 8, 11)]
962 = pv_add_constant (regs
[bits (insn
, 0, 3)], imm
);
965 else if ((insn
& 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
966 && (inst2
& 0x8000) == 0x0000)
968 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
969 | (bits (inst2
, 12, 14) << 8)
970 | bits (inst2
, 0, 7));
972 regs
[bits (inst2
, 8, 11)]
973 = pv_add_constant (regs
[bits (insn
, 0, 3)],
974 - (CORE_ADDR
) thumb_expand_immediate (imm
));
977 else if ((insn
& 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
978 && (inst2
& 0x8000) == 0x0000)
980 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
981 | (bits (inst2
, 12, 14) << 8)
982 | bits (inst2
, 0, 7));
984 regs
[bits (inst2
, 8, 11)]
985 = pv_add_constant (regs
[bits (insn
, 0, 3)], - (CORE_ADDR
) imm
);
988 else if ((insn
& 0xfbff) == 0xf04f) /* mov.w Rd, #const */
990 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
991 | (bits (inst2
, 12, 14) << 8)
992 | bits (inst2
, 0, 7));
994 regs
[bits (inst2
, 8, 11)]
995 = pv_constant (thumb_expand_immediate (imm
));
998 else if ((insn
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1001 = EXTRACT_MOVW_MOVT_IMM_T (insn
, inst2
);
1003 regs
[bits (inst2
, 8, 11)] = pv_constant (imm
);
1006 else if (insn
== 0xea5f /* mov.w Rd,Rm */
1007 && (inst2
& 0xf0f0) == 0)
1009 int dst_reg
= (inst2
& 0x0f00) >> 8;
1010 int src_reg
= inst2
& 0xf;
1011 regs
[dst_reg
] = regs
[src_reg
];
1014 else if ((insn
& 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1016 /* Constant pool loads. */
1017 unsigned int constant
;
1020 offset
= bits (inst2
, 0, 11);
1022 loc
= start
+ 4 + offset
;
1024 loc
= start
+ 4 - offset
;
1026 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1027 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1030 else if ((insn
& 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1032 /* Constant pool loads. */
1033 unsigned int constant
;
1036 offset
= bits (inst2
, 0, 7) << 2;
1038 loc
= start
+ 4 + offset
;
1040 loc
= start
+ 4 - offset
;
1042 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1043 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1045 constant
= read_memory_unsigned_integer (loc
+ 4, 4, byte_order
);
1046 regs
[bits (inst2
, 8, 11)] = pv_constant (constant
);
1049 else if (thumb2_instruction_changes_pc (insn
, inst2
))
1051 /* Don't scan past anything that might change control flow. */
1056 /* The optimizer might shove anything into the prologue,
1057 so we just skip what we don't recognize. */
1058 unrecognized_pc
= start
;
1063 else if (thumb_instruction_changes_pc (insn
))
1065 /* Don't scan past anything that might change control flow. */
1070 /* The optimizer might shove anything into the prologue,
1071 so we just skip what we don't recognize. */
1072 unrecognized_pc
= start
;
1079 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1080 paddress (gdbarch
, start
));
1082 if (unrecognized_pc
== 0)
1083 unrecognized_pc
= start
;
1086 return unrecognized_pc
;
1088 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1090 /* Frame pointer is fp. Frame size is constant. */
1091 cache
->framereg
= ARM_FP_REGNUM
;
1092 cache
->framesize
= -regs
[ARM_FP_REGNUM
].k
;
1094 else if (pv_is_register (regs
[THUMB_FP_REGNUM
], ARM_SP_REGNUM
))
1096 /* Frame pointer is r7. Frame size is constant. */
1097 cache
->framereg
= THUMB_FP_REGNUM
;
1098 cache
->framesize
= -regs
[THUMB_FP_REGNUM
].k
;
1102 /* Try the stack pointer... this is a bit desperate. */
1103 cache
->framereg
= ARM_SP_REGNUM
;
1104 cache
->framesize
= -regs
[ARM_SP_REGNUM
].k
;
1107 for (i
= 0; i
< 16; i
++)
1108 if (stack
.find_reg (gdbarch
, i
, &offset
))
1109 cache
->saved_regs
[i
].addr
= offset
;
1111 return unrecognized_pc
;
1115 /* Try to analyze the instructions starting from PC, which load symbol
1116 __stack_chk_guard. Return the address of instruction after loading this
1117 symbol, set the dest register number to *BASEREG, and set the size of
1118 instructions for loading symbol in OFFSET. Return 0 if instructions are
1122 arm_analyze_load_stack_chk_guard(CORE_ADDR pc
, struct gdbarch
*gdbarch
,
1123 unsigned int *destreg
, int *offset
)
1125 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1126 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1127 unsigned int low
, high
, address
;
1132 unsigned short insn1
1133 = read_code_unsigned_integer (pc
, 2, byte_order_for_code
);
1135 if ((insn1
& 0xf800) == 0x4800) /* ldr Rd, #immed */
1137 *destreg
= bits (insn1
, 8, 10);
1139 address
= (pc
& 0xfffffffc) + 4 + (bits (insn1
, 0, 7) << 2);
1140 address
= read_memory_unsigned_integer (address
, 4,
1141 byte_order_for_code
);
1143 else if ((insn1
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1145 unsigned short insn2
1146 = read_code_unsigned_integer (pc
+ 2, 2, byte_order_for_code
);
1148 low
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1151 = read_code_unsigned_integer (pc
+ 4, 2, byte_order_for_code
);
1153 = read_code_unsigned_integer (pc
+ 6, 2, byte_order_for_code
);
1155 /* movt Rd, #const */
1156 if ((insn1
& 0xfbc0) == 0xf2c0)
1158 high
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1159 *destreg
= bits (insn2
, 8, 11);
1161 address
= (high
<< 16 | low
);
1168 = read_code_unsigned_integer (pc
, 4, byte_order_for_code
);
1170 if ((insn
& 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1172 address
= bits (insn
, 0, 11) + pc
+ 8;
1173 address
= read_memory_unsigned_integer (address
, 4,
1174 byte_order_for_code
);
1176 *destreg
= bits (insn
, 12, 15);
1179 else if ((insn
& 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1181 low
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1184 = read_code_unsigned_integer (pc
+ 4, 4, byte_order_for_code
);
1186 if ((insn
& 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1188 high
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1189 *destreg
= bits (insn
, 12, 15);
1191 address
= (high
<< 16 | low
);
1199 /* Try to skip a sequence of instructions used for stack protector. If PC
1200 points to the first instruction of this sequence, return the address of
1201 first instruction after this sequence, otherwise, return original PC.
1203 On arm, this sequence of instructions is composed of mainly three steps,
1204 Step 1: load symbol __stack_chk_guard,
1205 Step 2: load from address of __stack_chk_guard,
1206 Step 3: store it to somewhere else.
1208 Usually, instructions on step 2 and step 3 are the same on various ARM
1209 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1210 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1211 instructions in step 1 vary from different ARM architectures. On ARMv7,
1214 movw Rn, #:lower16:__stack_chk_guard
1215 movt Rn, #:upper16:__stack_chk_guard
1222 .word __stack_chk_guard
1224 Since ldr/str is a very popular instruction, we can't use them as
1225 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1226 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1227 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1230 arm_skip_stack_protector(CORE_ADDR pc
, struct gdbarch
*gdbarch
)
1232 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1233 unsigned int basereg
;
1234 struct bound_minimal_symbol stack_chk_guard
;
1236 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1239 /* Try to parse the instructions in Step 1. */
1240 addr
= arm_analyze_load_stack_chk_guard (pc
, gdbarch
,
1245 stack_chk_guard
= lookup_minimal_symbol_by_pc (addr
);
1246 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1247 Otherwise, this sequence cannot be for stack protector. */
1248 if (stack_chk_guard
.minsym
== NULL
1249 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard
.minsym
), "__stack_chk_guard"))
1254 unsigned int destreg
;
1256 = read_code_unsigned_integer (pc
+ offset
, 2, byte_order_for_code
);
1258 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1259 if ((insn
& 0xf800) != 0x6800)
1261 if (bits (insn
, 3, 5) != basereg
)
1263 destreg
= bits (insn
, 0, 2);
1265 insn
= read_code_unsigned_integer (pc
+ offset
+ 2, 2,
1266 byte_order_for_code
);
1267 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1268 if ((insn
& 0xf800) != 0x6000)
1270 if (destreg
!= bits (insn
, 0, 2))
1275 unsigned int destreg
;
1277 = read_code_unsigned_integer (pc
+ offset
, 4, byte_order_for_code
);
1279 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1280 if ((insn
& 0x0e500000) != 0x04100000)
1282 if (bits (insn
, 16, 19) != basereg
)
1284 destreg
= bits (insn
, 12, 15);
1285 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1286 insn
= read_code_unsigned_integer (pc
+ offset
+ 4,
1287 4, byte_order_for_code
);
1288 if ((insn
& 0x0e500000) != 0x04000000)
1290 if (bits (insn
, 12, 15) != destreg
)
1293 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1296 return pc
+ offset
+ 4;
1298 return pc
+ offset
+ 8;
1301 /* Advance the PC across any function entry prologue instructions to
1302 reach some "real" code.
1304 The APCS (ARM Procedure Call Standard) defines the following
1308 [stmfd sp!, {a1,a2,a3,a4}]
1309 stmfd sp!, {...,fp,ip,lr,pc}
1310 [stfe f7, [sp, #-12]!]
1311 [stfe f6, [sp, #-12]!]
1312 [stfe f5, [sp, #-12]!]
1313 [stfe f4, [sp, #-12]!]
1314 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1317 arm_skip_prologue (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
1319 CORE_ADDR func_addr
, limit_pc
;
1321 /* See if we can determine the end of the prologue via the symbol table.
1322 If so, then return either PC, or the PC after the prologue, whichever
1324 if (find_pc_partial_function (pc
, NULL
, &func_addr
, NULL
))
1326 CORE_ADDR post_prologue_pc
1327 = skip_prologue_using_sal (gdbarch
, func_addr
);
1328 struct compunit_symtab
*cust
= find_pc_compunit_symtab (func_addr
);
1330 if (post_prologue_pc
)
1332 = arm_skip_stack_protector (post_prologue_pc
, gdbarch
);
1335 /* GCC always emits a line note before the prologue and another
1336 one after, even if the two are at the same address or on the
1337 same line. Take advantage of this so that we do not need to
1338 know every instruction that might appear in the prologue. We
1339 will have producer information for most binaries; if it is
1340 missing (e.g. for -gstabs), assuming the GNU tools. */
1341 if (post_prologue_pc
1343 || COMPUNIT_PRODUCER (cust
) == NULL
1344 || startswith (COMPUNIT_PRODUCER (cust
), "GNU ")
1345 || startswith (COMPUNIT_PRODUCER (cust
), "clang ")))
1346 return post_prologue_pc
;
1348 if (post_prologue_pc
!= 0)
1350 CORE_ADDR analyzed_limit
;
1352 /* For non-GCC compilers, make sure the entire line is an
1353 acceptable prologue; GDB will round this function's
1354 return value up to the end of the following line so we
1355 can not skip just part of a line (and we do not want to).
1357 RealView does not treat the prologue specially, but does
1358 associate prologue code with the opening brace; so this
1359 lets us skip the first line if we think it is the opening
1361 if (arm_pc_is_thumb (gdbarch
, func_addr
))
1362 analyzed_limit
= thumb_analyze_prologue (gdbarch
, func_addr
,
1363 post_prologue_pc
, NULL
);
1365 analyzed_limit
= arm_analyze_prologue (gdbarch
, func_addr
,
1366 post_prologue_pc
, NULL
);
1368 if (analyzed_limit
!= post_prologue_pc
)
1371 return post_prologue_pc
;
1375 /* Can't determine prologue from the symbol table, need to examine
1378 /* Find an upper limit on the function prologue using the debug
1379 information. If the debug information could not be used to provide
1380 that bound, then use an arbitrary large number as the upper bound. */
1381 /* Like arm_scan_prologue, stop no later than pc + 64. */
1382 limit_pc
= skip_prologue_using_sal (gdbarch
, pc
);
1384 limit_pc
= pc
+ 64; /* Magic. */
1387 /* Check if this is Thumb code. */
1388 if (arm_pc_is_thumb (gdbarch
, pc
))
1389 return thumb_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
);
1391 return arm_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
);
1395 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1396 This function decodes a Thumb function prologue to determine:
1397 1) the size of the stack frame
1398 2) which registers are saved on it
1399 3) the offsets of saved regs
1400 4) the offset from the stack pointer to the frame pointer
1402 A typical Thumb function prologue would create this stack frame
1403 (offsets relative to FP)
1404 old SP -> 24 stack parameters
1407 R7 -> 0 local variables (16 bytes)
1408 SP -> -12 additional stack space (12 bytes)
1409 The frame size would thus be 36 bytes, and the frame offset would be
1410 12 bytes. The frame register is R7.
1412 The comments for thumb_skip_prolog() describe the algorithm we use
1413 to detect the end of the prolog. */
1417 thumb_scan_prologue (struct gdbarch
*gdbarch
, CORE_ADDR prev_pc
,
1418 CORE_ADDR block_addr
, struct arm_prologue_cache
*cache
)
1420 CORE_ADDR prologue_start
;
1421 CORE_ADDR prologue_end
;
1423 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1426 /* See comment in arm_scan_prologue for an explanation of
1428 if (prologue_end
> prologue_start
+ 64)
1430 prologue_end
= prologue_start
+ 64;
1434 /* We're in the boondocks: we have no idea where the start of the
1438 prologue_end
= std::min (prologue_end
, prev_pc
);
1440 thumb_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
1443 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1447 arm_instruction_restores_sp (unsigned int insn
)
1449 if (bits (insn
, 28, 31) != INST_NV
)
1451 if ((insn
& 0x0df0f000) == 0x0080d000
1452 /* ADD SP (register or immediate). */
1453 || (insn
& 0x0df0f000) == 0x0040d000
1454 /* SUB SP (register or immediate). */
1455 || (insn
& 0x0ffffff0) == 0x01a0d000
1457 || (insn
& 0x0fff0000) == 0x08bd0000
1459 || (insn
& 0x0fff0000) == 0x049d0000)
1460 /* POP of a single register. */
1467 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1468 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1469 fill it in. Return the first address not recognized as a prologue
1472 We recognize all the instructions typically found in ARM prologues,
1473 plus harmless instructions which can be skipped (either for analysis
1474 purposes, or a more restrictive set that can be skipped when finding
1475 the end of the prologue). */
1478 arm_analyze_prologue (struct gdbarch
*gdbarch
,
1479 CORE_ADDR prologue_start
, CORE_ADDR prologue_end
,
1480 struct arm_prologue_cache
*cache
)
1482 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1484 CORE_ADDR offset
, current_pc
;
1485 pv_t regs
[ARM_FPS_REGNUM
];
1486 CORE_ADDR unrecognized_pc
= 0;
1488 /* Search the prologue looking for instructions that set up the
1489 frame pointer, adjust the stack pointer, and save registers.
1491 Be careful, however, and if it doesn't look like a prologue,
1492 don't try to scan it. If, for instance, a frameless function
1493 begins with stmfd sp!, then we will tell ourselves there is
1494 a frame, which will confuse stack traceback, as well as "finish"
1495 and other operations that rely on a knowledge of the stack
1498 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1499 regs
[regno
] = pv_register (regno
, 0);
1500 pv_area
stack (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
1502 for (current_pc
= prologue_start
;
1503 current_pc
< prologue_end
;
1507 = read_code_unsigned_integer (current_pc
, 4, byte_order_for_code
);
1509 if (insn
== 0xe1a0c00d) /* mov ip, sp */
1511 regs
[ARM_IP_REGNUM
] = regs
[ARM_SP_REGNUM
];
1514 else if ((insn
& 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1515 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1517 unsigned imm
= insn
& 0xff; /* immediate value */
1518 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1519 int rd
= bits (insn
, 12, 15);
1520 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1521 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], imm
);
1524 else if ((insn
& 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1525 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1527 unsigned imm
= insn
& 0xff; /* immediate value */
1528 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1529 int rd
= bits (insn
, 12, 15);
1530 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1531 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], -imm
);
1534 else if ((insn
& 0xffff0fff) == 0xe52d0004) /* str Rd,
1537 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1539 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1540 stack
.store (regs
[ARM_SP_REGNUM
], 4,
1541 regs
[bits (insn
, 12, 15)]);
1544 else if ((insn
& 0xffff0000) == 0xe92d0000)
1545 /* stmfd sp!, {..., fp, ip, lr, pc}
1547 stmfd sp!, {a1, a2, a3, a4} */
1549 int mask
= insn
& 0xffff;
1551 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1554 /* Calculate offsets of saved registers. */
1555 for (regno
= ARM_PC_REGNUM
; regno
>= 0; regno
--)
1556 if (mask
& (1 << regno
))
1559 = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1560 stack
.store (regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
1563 else if ((insn
& 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1564 || (insn
& 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1565 || (insn
& 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1567 /* No need to add this to saved_regs -- it's just an arg reg. */
1570 else if ((insn
& 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1571 || (insn
& 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1572 || (insn
& 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1574 /* No need to add this to saved_regs -- it's just an arg reg. */
1577 else if ((insn
& 0xfff00000) == 0xe8800000 /* stm Rn,
1579 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1581 /* No need to add this to saved_regs -- it's just arg regs. */
1584 else if ((insn
& 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1586 unsigned imm
= insn
& 0xff; /* immediate value */
1587 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1588 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1589 regs
[ARM_FP_REGNUM
] = pv_add_constant (regs
[ARM_IP_REGNUM
], -imm
);
1591 else if ((insn
& 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1593 unsigned imm
= insn
& 0xff; /* immediate value */
1594 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1595 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1596 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -imm
);
1598 else if ((insn
& 0xffff7fff) == 0xed6d0103 /* stfe f?,
1600 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1602 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1605 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1606 regno
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x07);
1607 stack
.store (regs
[ARM_SP_REGNUM
], 12, regs
[regno
]);
1609 else if ((insn
& 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1611 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1613 int n_saved_fp_regs
;
1614 unsigned int fp_start_reg
, fp_bound_reg
;
1616 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1619 if ((insn
& 0x800) == 0x800) /* N0 is set */
1621 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1622 n_saved_fp_regs
= 3;
1624 n_saved_fp_regs
= 1;
1628 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1629 n_saved_fp_regs
= 2;
1631 n_saved_fp_regs
= 4;
1634 fp_start_reg
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x7);
1635 fp_bound_reg
= fp_start_reg
+ n_saved_fp_regs
;
1636 for (; fp_start_reg
< fp_bound_reg
; fp_start_reg
++)
1638 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1639 stack
.store (regs
[ARM_SP_REGNUM
], 12,
1640 regs
[fp_start_reg
++]);
1643 else if ((insn
& 0xff000000) == 0xeb000000 && cache
== NULL
) /* bl */
1645 /* Allow some special function calls when skipping the
1646 prologue; GCC generates these before storing arguments to
1648 CORE_ADDR dest
= BranchDest (current_pc
, insn
);
1650 if (skip_prologue_function (gdbarch
, dest
, 0))
1655 else if ((insn
& 0xf0000000) != 0xe0000000)
1656 break; /* Condition not true, exit early. */
1657 else if (arm_instruction_changes_pc (insn
))
1658 /* Don't scan past anything that might change control flow. */
1660 else if (arm_instruction_restores_sp (insn
))
1662 /* Don't scan past the epilogue. */
1665 else if ((insn
& 0xfe500000) == 0xe8100000 /* ldm */
1666 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1667 /* Ignore block loads from the stack, potentially copying
1668 parameters from memory. */
1670 else if ((insn
& 0xfc500000) == 0xe4100000
1671 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1672 /* Similarly ignore single loads from the stack. */
1674 else if ((insn
& 0xffff0ff0) == 0xe1a00000)
1675 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1676 register instead of the stack. */
1680 /* The optimizer might shove anything into the prologue, if
1681 we build up cache (cache != NULL) from scanning prologue,
1682 we just skip what we don't recognize and scan further to
1683 make cache as complete as possible. However, if we skip
1684 prologue, we'll stop immediately on unrecognized
1686 unrecognized_pc
= current_pc
;
1694 if (unrecognized_pc
== 0)
1695 unrecognized_pc
= current_pc
;
1699 int framereg
, framesize
;
1701 /* The frame size is just the distance from the frame register
1702 to the original stack pointer. */
1703 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1705 /* Frame pointer is fp. */
1706 framereg
= ARM_FP_REGNUM
;
1707 framesize
= -regs
[ARM_FP_REGNUM
].k
;
1711 /* Try the stack pointer... this is a bit desperate. */
1712 framereg
= ARM_SP_REGNUM
;
1713 framesize
= -regs
[ARM_SP_REGNUM
].k
;
1716 cache
->framereg
= framereg
;
1717 cache
->framesize
= framesize
;
1719 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1720 if (stack
.find_reg (gdbarch
, regno
, &offset
))
1721 cache
->saved_regs
[regno
].addr
= offset
;
1725 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1726 paddress (gdbarch
, unrecognized_pc
));
1728 return unrecognized_pc
;
1732 arm_scan_prologue (struct frame_info
*this_frame
,
1733 struct arm_prologue_cache
*cache
)
1735 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
1736 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
1737 CORE_ADDR prologue_start
, prologue_end
;
1738 CORE_ADDR prev_pc
= get_frame_pc (this_frame
);
1739 CORE_ADDR block_addr
= get_frame_address_in_block (this_frame
);
1741 /* Assume there is no frame until proven otherwise. */
1742 cache
->framereg
= ARM_SP_REGNUM
;
1743 cache
->framesize
= 0;
1745 /* Check for Thumb prologue. */
1746 if (arm_frame_is_thumb (this_frame
))
1748 thumb_scan_prologue (gdbarch
, prev_pc
, block_addr
, cache
);
1752 /* Find the function prologue. If we can't find the function in
1753 the symbol table, peek in the stack frame to find the PC. */
1754 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1757 /* One way to find the end of the prologue (which works well
1758 for unoptimized code) is to do the following:
1760 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1763 prologue_end = prev_pc;
1764 else if (sal.end < prologue_end)
1765 prologue_end = sal.end;
1767 This mechanism is very accurate so long as the optimizer
1768 doesn't move any instructions from the function body into the
1769 prologue. If this happens, sal.end will be the last
1770 instruction in the first hunk of prologue code just before
1771 the first instruction that the scheduler has moved from
1772 the body to the prologue.
1774 In order to make sure that we scan all of the prologue
1775 instructions, we use a slightly less accurate mechanism which
1776 may scan more than necessary. To help compensate for this
1777 lack of accuracy, the prologue scanning loop below contains
1778 several clauses which'll cause the loop to terminate early if
1779 an implausible prologue instruction is encountered.
1785 is a suitable endpoint since it accounts for the largest
1786 possible prologue plus up to five instructions inserted by
1789 if (prologue_end
> prologue_start
+ 64)
1791 prologue_end
= prologue_start
+ 64; /* See above. */
1796 /* We have no symbol information. Our only option is to assume this
1797 function has a standard stack frame and the normal frame register.
1798 Then, we can find the value of our frame pointer on entrance to
1799 the callee (or at the present moment if this is the innermost frame).
1800 The value stored there should be the address of the stmfd + 8. */
1801 CORE_ADDR frame_loc
;
1802 ULONGEST return_value
;
1804 frame_loc
= get_frame_register_unsigned (this_frame
, ARM_FP_REGNUM
);
1805 if (!safe_read_memory_unsigned_integer (frame_loc
, 4, byte_order
,
1810 prologue_start
= gdbarch_addr_bits_remove
1811 (gdbarch
, return_value
) - 8;
1812 prologue_end
= prologue_start
+ 64; /* See above. */
1816 if (prev_pc
< prologue_end
)
1817 prologue_end
= prev_pc
;
1819 arm_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
1822 static struct arm_prologue_cache
*
1823 arm_make_prologue_cache (struct frame_info
*this_frame
)
1826 struct arm_prologue_cache
*cache
;
1827 CORE_ADDR unwound_fp
;
1829 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
1830 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
1832 arm_scan_prologue (this_frame
, cache
);
1834 unwound_fp
= get_frame_register_unsigned (this_frame
, cache
->framereg
);
1835 if (unwound_fp
== 0)
1838 cache
->prev_sp
= unwound_fp
+ cache
->framesize
;
1840 /* Calculate actual addresses of saved registers using offsets
1841 determined by arm_scan_prologue. */
1842 for (reg
= 0; reg
< gdbarch_num_regs (get_frame_arch (this_frame
)); reg
++)
1843 if (trad_frame_addr_p (cache
->saved_regs
, reg
))
1844 cache
->saved_regs
[reg
].addr
+= cache
->prev_sp
;
1849 /* Implementation of the stop_reason hook for arm_prologue frames. */
1851 static enum unwind_stop_reason
1852 arm_prologue_unwind_stop_reason (struct frame_info
*this_frame
,
1855 struct arm_prologue_cache
*cache
;
1858 if (*this_cache
== NULL
)
1859 *this_cache
= arm_make_prologue_cache (this_frame
);
1860 cache
= (struct arm_prologue_cache
*) *this_cache
;
1862 /* This is meant to halt the backtrace at "_start". */
1863 pc
= get_frame_pc (this_frame
);
1864 if (pc
<= gdbarch_tdep (get_frame_arch (this_frame
))->lowest_pc
)
1865 return UNWIND_OUTERMOST
;
1867 /* If we've hit a wall, stop. */
1868 if (cache
->prev_sp
== 0)
1869 return UNWIND_OUTERMOST
;
1871 return UNWIND_NO_REASON
;
1874 /* Our frame ID for a normal frame is the current function's starting PC
1875 and the caller's SP when we were called. */
1878 arm_prologue_this_id (struct frame_info
*this_frame
,
1880 struct frame_id
*this_id
)
1882 struct arm_prologue_cache
*cache
;
1886 if (*this_cache
== NULL
)
1887 *this_cache
= arm_make_prologue_cache (this_frame
);
1888 cache
= (struct arm_prologue_cache
*) *this_cache
;
1890 /* Use function start address as part of the frame ID. If we cannot
1891 identify the start address (due to missing symbol information),
1892 fall back to just using the current PC. */
1893 pc
= get_frame_pc (this_frame
);
1894 func
= get_frame_func (this_frame
);
1898 id
= frame_id_build (cache
->prev_sp
, func
);
1902 static struct value
*
1903 arm_prologue_prev_register (struct frame_info
*this_frame
,
1907 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
1908 struct arm_prologue_cache
*cache
;
1910 if (*this_cache
== NULL
)
1911 *this_cache
= arm_make_prologue_cache (this_frame
);
1912 cache
= (struct arm_prologue_cache
*) *this_cache
;
1914 /* If we are asked to unwind the PC, then we need to return the LR
1915 instead. The prologue may save PC, but it will point into this
1916 frame's prologue, not the next frame's resume location. Also
1917 strip the saved T bit. A valid LR may have the low bit set, but
1918 a valid PC never does. */
1919 if (prev_regnum
== ARM_PC_REGNUM
)
1923 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
1924 return frame_unwind_got_constant (this_frame
, prev_regnum
,
1925 arm_addr_bits_remove (gdbarch
, lr
));
1928 /* SP is generally not saved to the stack, but this frame is
1929 identified by the next frame's stack pointer at the time of the call.
1930 The value was already reconstructed into PREV_SP. */
1931 if (prev_regnum
== ARM_SP_REGNUM
)
1932 return frame_unwind_got_constant (this_frame
, prev_regnum
, cache
->prev_sp
);
1934 /* The CPSR may have been changed by the call instruction and by the
1935 called function. The only bit we can reconstruct is the T bit,
1936 by checking the low bit of LR as of the call. This is a reliable
1937 indicator of Thumb-ness except for some ARM v4T pre-interworking
1938 Thumb code, which could get away with a clear low bit as long as
1939 the called function did not use bx. Guess that all other
1940 bits are unchanged; the condition flags are presumably lost,
1941 but the processor status is likely valid. */
1942 if (prev_regnum
== ARM_PS_REGNUM
)
1945 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
1947 cpsr
= get_frame_register_unsigned (this_frame
, prev_regnum
);
1948 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
1949 if (IS_THUMB_ADDR (lr
))
1953 return frame_unwind_got_constant (this_frame
, prev_regnum
, cpsr
);
1956 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
1960 struct frame_unwind arm_prologue_unwind
= {
1962 arm_prologue_unwind_stop_reason
,
1963 arm_prologue_this_id
,
1964 arm_prologue_prev_register
,
1966 default_frame_sniffer
1969 /* Maintain a list of ARM exception table entries per objfile, similar to the
1970 list of mapping symbols. We only cache entries for standard ARM-defined
1971 personality routines; the cache will contain only the frame unwinding
1972 instructions associated with the entry (not the descriptors). */
1974 static const struct objfile_data
*arm_exidx_data_key
;
1976 struct arm_exidx_entry
1981 typedef struct arm_exidx_entry arm_exidx_entry_s
;
1982 DEF_VEC_O(arm_exidx_entry_s
);
1984 struct arm_exidx_data
1986 VEC(arm_exidx_entry_s
) **section_maps
;
1990 arm_exidx_data_free (struct objfile
*objfile
, void *arg
)
1992 struct arm_exidx_data
*data
= (struct arm_exidx_data
*) arg
;
1995 for (i
= 0; i
< objfile
->obfd
->section_count
; i
++)
1996 VEC_free (arm_exidx_entry_s
, data
->section_maps
[i
]);
2000 arm_compare_exidx_entries (const struct arm_exidx_entry
*lhs
,
2001 const struct arm_exidx_entry
*rhs
)
2003 return lhs
->addr
< rhs
->addr
;
2006 static struct obj_section
*
2007 arm_obj_section_from_vma (struct objfile
*objfile
, bfd_vma vma
)
2009 struct obj_section
*osect
;
2011 ALL_OBJFILE_OSECTIONS (objfile
, osect
)
2012 if (bfd_get_section_flags (objfile
->obfd
,
2013 osect
->the_bfd_section
) & SEC_ALLOC
)
2015 bfd_vma start
, size
;
2016 start
= bfd_get_section_vma (objfile
->obfd
, osect
->the_bfd_section
);
2017 size
= bfd_get_section_size (osect
->the_bfd_section
);
2019 if (start
<= vma
&& vma
< start
+ size
)
/* Parse contents of exception table and exception index sections
   of OBJFILE, and fill in the exception table entry cache.

   For each entry that refers to a standard ARM-defined personality
   routine, extract the frame unwinding instructions (from either
   the index or the table section).  The unwinding instructions
   are normalized by:
   - extracting them from the rest of the table data
   - converting to host endianness
   - appending the implicit 0xb0 ("Finish") code

   The extracted and normalized instructions are stored for later
   retrieval by the arm_find_exidx_entry routine.  */
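/* Layout notes (from the "Exception Handling ABI for the ARM Architecture"):
   each .ARM.exidx entry is a pair of 32-bit words.  The first word is a
   prel31 (31-bit, self-relative) offset to the function covered.  The
   second word is either the value 1 (EXIDX_CANTUNWIND), an inline entry
   with bit 31 set carrying up to three unwind opcode bytes, or a prel31
   offset to the full entry in .ARM.extab.  */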
2041 arm_exidx_new_objfile (struct objfile
*objfile
)
2043 struct arm_exidx_data
*data
;
2044 asection
*exidx
, *extab
;
2045 bfd_vma exidx_vma
= 0, extab_vma
= 0;
2048 /* If we've already touched this file, do nothing. */
2049 if (!objfile
|| objfile_data (objfile
, arm_exidx_data_key
) != NULL
)
2052 /* Read contents of exception table and index. */
2053 exidx
= bfd_get_section_by_name (objfile
->obfd
, ELF_STRING_ARM_unwind
);
2054 gdb::byte_vector exidx_data
;
2057 exidx_vma
= bfd_section_vma (objfile
->obfd
, exidx
);
2058 exidx_data
.resize (bfd_get_section_size (exidx
));
2060 if (!bfd_get_section_contents (objfile
->obfd
, exidx
,
2061 exidx_data
.data (), 0,
2062 exidx_data
.size ()))
2066 extab
= bfd_get_section_by_name (objfile
->obfd
, ".ARM.extab");
2067 gdb::byte_vector extab_data
;
2070 extab_vma
= bfd_section_vma (objfile
->obfd
, extab
);
2071 extab_data
.resize (bfd_get_section_size (extab
));
2073 if (!bfd_get_section_contents (objfile
->obfd
, extab
,
2074 extab_data
.data (), 0,
2075 extab_data
.size ()))
2079 /* Allocate exception table data structure. */
2080 data
= OBSTACK_ZALLOC (&objfile
->objfile_obstack
, struct arm_exidx_data
);
2081 set_objfile_data (objfile
, arm_exidx_data_key
, data
);
2082 data
->section_maps
= OBSTACK_CALLOC (&objfile
->objfile_obstack
,
2083 objfile
->obfd
->section_count
,
2084 VEC(arm_exidx_entry_s
) *);
2086 /* Fill in exception table. */
2087 for (i
= 0; i
< exidx_data
.size () / 8; i
++)
2089 struct arm_exidx_entry new_exidx_entry
;
2090 bfd_vma idx
= bfd_h_get_32 (objfile
->obfd
, exidx_data
.data () + i
* 8);
2091 bfd_vma val
= bfd_h_get_32 (objfile
->obfd
,
2092 exidx_data
.data () + i
* 8 + 4);
2093 bfd_vma addr
= 0, word
= 0;
2094 int n_bytes
= 0, n_words
= 0;
2095 struct obj_section
*sec
;
2096 gdb_byte
*entry
= NULL
;
2098 /* Extract address of start of function. */
2099 idx
= ((idx
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2100 idx
+= exidx_vma
+ i
* 8;
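      /* Note: ((x & 0x7fffffff) ^ 0x40000000) - 0x40000000 sign-extends the
	 31-bit prel31 field to the full width of bfd_vma; adding the address
	 of the word it was read from then yields an absolute address.  */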
2102 /* Find section containing function and compute section offset. */
2103 sec
= arm_obj_section_from_vma (objfile
, idx
);
2106 idx
-= bfd_get_section_vma (objfile
->obfd
, sec
->the_bfd_section
);
2108 /* Determine address of exception table entry. */
2111 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2113 else if ((val
& 0xff000000) == 0x80000000)
2115 /* Exception table entry embedded in .ARM.exidx
2116 -- must be short form. */
2120 else if (!(val
& 0x80000000))
2122 /* Exception table entry in .ARM.extab. */
2123 addr
= ((val
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2124 addr
+= exidx_vma
+ i
* 8 + 4;
2126 if (addr
>= extab_vma
&& addr
+ 4 <= extab_vma
+ extab_data
.size ())
2128 word
= bfd_h_get_32 (objfile
->obfd
,
2129 extab_data
.data () + addr
- extab_vma
);
2132 if ((word
& 0xff000000) == 0x80000000)
2137 else if ((word
& 0xff000000) == 0x81000000
2138 || (word
& 0xff000000) == 0x82000000)
2142 n_words
= ((word
>> 16) & 0xff);
2144 else if (!(word
& 0x80000000))
2147 struct obj_section
*pers_sec
;
2148 int gnu_personality
= 0;
2150 /* Custom personality routine. */
2151 pers
= ((word
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2152 pers
= UNMAKE_THUMB_ADDR (pers
+ addr
- 4);
2154 /* Check whether we've got one of the variants of the
2155 GNU personality routines. */
2156 pers_sec
= arm_obj_section_from_vma (objfile
, pers
);
2159 static const char *personality
[] =
2161 "__gcc_personality_v0",
2162 "__gxx_personality_v0",
2163 "__gcj_personality_v0",
2164 "__gnu_objc_personality_v0",
2168 CORE_ADDR pc
= pers
+ obj_section_offset (pers_sec
);
2171 for (k
= 0; personality
[k
]; k
++)
2172 if (lookup_minimal_symbol_by_pc_name
2173 (pc
, personality
[k
], objfile
))
2175 gnu_personality
= 1;
2180 /* If so, the next word contains a word count in the high
2181 byte, followed by the same unwind instructions as the
2182 pre-defined forms. */
2184 && addr
+ 4 <= extab_vma
+ extab_data
.size ())
2186 word
= bfd_h_get_32 (objfile
->obfd
,
2188 + addr
- extab_vma
));
2191 n_words
= ((word
>> 24) & 0xff);
2197 /* Sanity check address. */
2199 if (addr
< extab_vma
2200 || addr
+ 4 * n_words
> extab_vma
+ extab_data
.size ())
2201 n_words
= n_bytes
= 0;
2203 /* The unwind instructions reside in WORD (only the N_BYTES least
2204 significant bytes are valid), followed by N_WORDS words in the
2205 extab section starting at ADDR. */
2206 if (n_bytes
|| n_words
)
2209 = (gdb_byte
*) obstack_alloc (&objfile
->objfile_obstack
,
2210 n_bytes
+ n_words
* 4 + 1);
2213 *p
++ = (gdb_byte
) ((word
>> (8 * n_bytes
)) & 0xff);
2217 word
= bfd_h_get_32 (objfile
->obfd
,
2218 extab_data
.data () + addr
- extab_vma
);
2221 *p
++ = (gdb_byte
) ((word
>> 24) & 0xff);
2222 *p
++ = (gdb_byte
) ((word
>> 16) & 0xff);
2223 *p
++ = (gdb_byte
) ((word
>> 8) & 0xff);
2224 *p
++ = (gdb_byte
) (word
& 0xff);
2227 /* Implied "Finish" to terminate the list. */
2231 /* Push entry onto vector. They are guaranteed to always
2232 appear in order of increasing addresses. */
2233 new_exidx_entry
.addr
= idx
;
2234 new_exidx_entry
.entry
= entry
;
2235 VEC_safe_push (arm_exidx_entry_s
,
2236 data
->section_maps
[sec
->the_bfd_section
->index
],
/* Search for the exception table entry covering MEMADDR.  If one is found,
   return a pointer to its data.  Otherwise, return 0.  If START is non-NULL,
   set *START to the start of the region covered by this entry.  */

static gdb_byte *
arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_exidx_data *data;
      VEC(arm_exidx_entry_s) *map;
      struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
      unsigned int idx;

      data = ((struct arm_exidx_data *)
	      objfile_data (sec->objfile, arm_exidx_data_key));
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_exidx_entry_s, map))
	    {
	      struct arm_exidx_entry *map_sym;

	      idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
				     arm_compare_exidx_entries);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 exception table entry covers this address.  */
	      if (idx < VEC_length (arm_exidx_entry_s, map))
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx);
		  if (map_sym->addr == map_key.addr)
		    {
		      if (start)
			*start = map_sym->addr + obj_section_addr (sec);
		      return map_sym->entry;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
		  if (start)
		    *start = map_sym->addr + obj_section_addr (sec);
		  return map_sym->entry;
		}
	    }
	}
    }

  return NULL;
}
/* Given the current frame THIS_FRAME, and its associated frame unwinding
   instruction list from the ARM exception table entry ENTRY, allocate and
   return a prologue cache structure describing how to unwind this frame.

   Return NULL if the unwinding instruction list contains a "spare",
   "reserved" or "refuse to unwind" instruction as defined in section
   "9.3 Frame unwinding instructions" of the "Exception Handling ABI
   for the ARM Architecture" document.  */
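/* For reference, the unwind opcodes decoded below are, roughly:
     00xxxxxx            vsp += (x << 2) + 4
     01xxxxxx            vsp -= (x << 2) + 4
     1000iiii iiiiiiii   pop r4-r15 under mask (mask of 0 = refuse to unwind)
     1001nnnn            vsp = r[n]  (n == 13 or 15 is reserved)
     10100nnn / 10101nnn pop r4-r[4+n], the latter form also popping r14
     10110000            finish
     10110001 0000iiii   pop r0-r3 under mask
     10110010 uleb128    vsp += 0x204 + (uleb128 << 2)
     10110011, 10111nnn  pop VFP double registers (FSTMFDX form)
     11000nnn, 11000110, 11000111  pop iWMMXt registers
     11001000, 11001001, 11010nnn  pop VFP double registers (VPUSH form)
   Anything else is "spare" and causes a fallback to prologue analysis.  */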
2308 static struct arm_prologue_cache
*
2309 arm_exidx_fill_cache (struct frame_info
*this_frame
, gdb_byte
*entry
)
2314 struct arm_prologue_cache
*cache
;
2315 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2316 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2322 /* Whenever we reload SP, we actually have to retrieve its
2323 actual value in the current frame. */
2326 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2328 int reg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2329 vsp
= get_frame_register_unsigned (this_frame
, reg
);
2333 CORE_ADDR addr
= cache
->saved_regs
[ARM_SP_REGNUM
].addr
;
2334 vsp
= get_frame_memory_unsigned (this_frame
, addr
, 4);
2340 /* Decode next unwind instruction. */
2343 if ((insn
& 0xc0) == 0)
2345 int offset
= insn
& 0x3f;
2346 vsp
+= (offset
<< 2) + 4;
2348 else if ((insn
& 0xc0) == 0x40)
2350 int offset
= insn
& 0x3f;
2351 vsp
-= (offset
<< 2) + 4;
2353 else if ((insn
& 0xf0) == 0x80)
2355 int mask
= ((insn
& 0xf) << 8) | *entry
++;
2358 /* The special case of an all-zero mask identifies
2359 "Refuse to unwind". We return NULL to fall back
2360 to the prologue analyzer. */
2364 /* Pop registers r4..r15 under mask. */
2365 for (i
= 0; i
< 12; i
++)
2366 if (mask
& (1 << i
))
2368 cache
->saved_regs
[4 + i
].addr
= vsp
;
2372 /* Special-case popping SP -- we need to reload vsp. */
2373 if (mask
& (1 << (ARM_SP_REGNUM
- 4)))
2376 else if ((insn
& 0xf0) == 0x90)
2378 int reg
= insn
& 0xf;
2380 /* Reserved cases. */
2381 if (reg
== ARM_SP_REGNUM
|| reg
== ARM_PC_REGNUM
)
2384 /* Set SP from another register and mark VSP for reload. */
2385 cache
->saved_regs
[ARM_SP_REGNUM
] = cache
->saved_regs
[reg
];
2388 else if ((insn
& 0xf0) == 0xa0)
2390 int count
= insn
& 0x7;
2391 int pop_lr
= (insn
& 0x8) != 0;
2394 /* Pop r4..r[4+count]. */
2395 for (i
= 0; i
<= count
; i
++)
2397 cache
->saved_regs
[4 + i
].addr
= vsp
;
2401 /* If indicated by flag, pop LR as well. */
2404 cache
->saved_regs
[ARM_LR_REGNUM
].addr
= vsp
;
2408 else if (insn
== 0xb0)
2410 /* We could only have updated PC by popping into it; if so, it
2411 will show up as address. Otherwise, copy LR into PC. */
2412 if (!trad_frame_addr_p (cache
->saved_regs
, ARM_PC_REGNUM
))
2413 cache
->saved_regs
[ARM_PC_REGNUM
]
2414 = cache
->saved_regs
[ARM_LR_REGNUM
];
2419 else if (insn
== 0xb1)
2421 int mask
= *entry
++;
2424 /* All-zero mask and mask >= 16 is "spare". */
2425 if (mask
== 0 || mask
>= 16)
2428 /* Pop r0..r3 under mask. */
2429 for (i
= 0; i
< 4; i
++)
2430 if (mask
& (1 << i
))
2432 cache
->saved_regs
[i
].addr
= vsp
;
2436 else if (insn
== 0xb2)
2438 ULONGEST offset
= 0;
2443 offset
|= (*entry
& 0x7f) << shift
;
2446 while (*entry
++ & 0x80);
2448 vsp
+= 0x204 + (offset
<< 2);
2450 else if (insn
== 0xb3)
2452 int start
= *entry
>> 4;
2453 int count
= (*entry
++) & 0xf;
2456 /* Only registers D0..D15 are valid here. */
2457 if (start
+ count
>= 16)
2460 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2461 for (i
= 0; i
<= count
; i
++)
2463 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2467 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2470 else if ((insn
& 0xf8) == 0xb8)
2472 int count
= insn
& 0x7;
2475 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2476 for (i
= 0; i
<= count
; i
++)
2478 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2482 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2485 else if (insn
== 0xc6)
2487 int start
= *entry
>> 4;
2488 int count
= (*entry
++) & 0xf;
2491 /* Only registers WR0..WR15 are valid. */
2492 if (start
+ count
>= 16)
2495 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2496 for (i
= 0; i
<= count
; i
++)
2498 cache
->saved_regs
[ARM_WR0_REGNUM
+ start
+ i
].addr
= vsp
;
2502 else if (insn
== 0xc7)
2504 int mask
= *entry
++;
2507 /* All-zero mask and mask >= 16 is "spare". */
2508 if (mask
== 0 || mask
>= 16)
2511 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2512 for (i
= 0; i
< 4; i
++)
2513 if (mask
& (1 << i
))
2515 cache
->saved_regs
[ARM_WCGR0_REGNUM
+ i
].addr
= vsp
;
2519 else if ((insn
& 0xf8) == 0xc0)
2521 int count
= insn
& 0x7;
2524 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2525 for (i
= 0; i
<= count
; i
++)
2527 cache
->saved_regs
[ARM_WR0_REGNUM
+ 10 + i
].addr
= vsp
;
2531 else if (insn
== 0xc8)
2533 int start
= *entry
>> 4;
2534 int count
= (*entry
++) & 0xf;
2537 /* Only registers D0..D31 are valid. */
2538 if (start
+ count
>= 16)
2541 /* Pop VFP double-precision registers
2542 D[16+start]..D[16+start+count]. */
2543 for (i
= 0; i
<= count
; i
++)
2545 cache
->saved_regs
[ARM_D0_REGNUM
+ 16 + start
+ i
].addr
= vsp
;
2549 else if (insn
== 0xc9)
2551 int start
= *entry
>> 4;
2552 int count
= (*entry
++) & 0xf;
2555 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2556 for (i
= 0; i
<= count
; i
++)
2558 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2562 else if ((insn
& 0xf8) == 0xd0)
2564 int count
= insn
& 0x7;
2567 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2568 for (i
= 0; i
<= count
; i
++)
2570 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2576 /* Everything else is "spare". */
2581 /* If we restore SP from a register, assume this was the frame register.
2582 Otherwise just fall back to SP as frame register. */
2583 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2584 cache
->framereg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2586 cache
->framereg
= ARM_SP_REGNUM
;
2588 /* Determine offset to previous frame. */
2590 = vsp
- get_frame_register_unsigned (this_frame
, cache
->framereg
);
2592 /* We already got the previous SP. */
2593 cache
->prev_sp
= vsp
;
/* Unwinding via ARM exception table entries.  Note that the sniffer
   already computes a filled-in prologue cache, which is then used
   with the same arm_prologue_this_id and arm_prologue_prev_register
   routines also used for prologue-parsing based unwinding.  */
2604 arm_exidx_unwind_sniffer (const struct frame_unwind
*self
,
2605 struct frame_info
*this_frame
,
2606 void **this_prologue_cache
)
2608 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2609 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
2610 CORE_ADDR addr_in_block
, exidx_region
, func_start
;
2611 struct arm_prologue_cache
*cache
;
2614 /* See if we have an ARM exception table entry covering this address. */
2615 addr_in_block
= get_frame_address_in_block (this_frame
);
2616 entry
= arm_find_exidx_entry (addr_in_block
, &exidx_region
);
2620 /* The ARM exception table does not describe unwind information
2621 for arbitrary PC values, but is guaranteed to be correct only
2622 at call sites. We have to decide here whether we want to use
2623 ARM exception table information for this frame, or fall back
2624 to using prologue parsing. (Note that if we have DWARF CFI,
2625 this sniffer isn't even called -- CFI is always preferred.)
2627 Before we make this decision, however, we check whether we
2628 actually have *symbol* information for the current frame.
2629 If not, prologue parsing would not work anyway, so we might
2630 as well use the exception table and hope for the best. */
2631 if (find_pc_partial_function (addr_in_block
, NULL
, &func_start
, NULL
))
2635 /* If the next frame is "normal", we are at a call site in this
2636 frame, so exception information is guaranteed to be valid. */
2637 if (get_next_frame (this_frame
)
2638 && get_frame_type (get_next_frame (this_frame
)) == NORMAL_FRAME
)
2641 /* We also assume exception information is valid if we're currently
2642 blocked in a system call. The system library is supposed to
2643 ensure this, so that e.g. pthread cancellation works. */
2644 if (arm_frame_is_thumb (this_frame
))
2648 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame
) - 2,
2649 2, byte_order_for_code
, &insn
)
2650 && (insn
& 0xff00) == 0xdf00 /* svc */)
2657 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame
) - 4,
2658 4, byte_order_for_code
, &insn
)
2659 && (insn
& 0x0f000000) == 0x0f000000 /* svc */)
2663 /* Bail out if we don't know that exception information is valid. */
2667 /* The ARM exception index does not mark the *end* of the region
2668 covered by the entry, and some functions will not have any entry.
2669 To correctly recognize the end of the covered region, the linker
2670 should have inserted dummy records with a CANTUNWIND marker.
2672 Unfortunately, current versions of GNU ld do not reliably do
2673 this, and thus we may have found an incorrect entry above.
2674 As a (temporary) sanity check, we only use the entry if it
2675 lies *within* the bounds of the function. Note that this check
2676 might reject perfectly valid entries that just happen to cover
2677 multiple functions; therefore this check ought to be removed
2678 once the linker is fixed. */
2679 if (func_start
> exidx_region
)
2683 /* Decode the list of unwinding instructions into a prologue cache.
2684 Note that this may fail due to e.g. a "refuse to unwind" code. */
2685 cache
= arm_exidx_fill_cache (this_frame
, entry
);
2689 *this_prologue_cache
= cache
;
struct frame_unwind arm_exidx_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_exidx_unwind_sniffer
};
static struct arm_prologue_cache *
arm_make_epilogue_frame_cache (struct frame_info *this_frame)
{
  struct arm_prologue_cache *cache;
  int reg;

  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  /* Still rely on the offset calculated from prologue.  */
  arm_scan_prologue (this_frame, cache);

  /* Since we are in epilogue, the SP has been restored.  */
  cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);

  /* Calculate actual addresses of saved registers using offsets
     determined by arm_scan_prologue.  */
  for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
    if (trad_frame_addr_p (cache->saved_regs, reg))
      cache->saved_regs[reg].addr += cache->prev_sp;

  return cache;
}
/* Implementation of function hook 'this_id' in
   'struct frame_unwind' for epilogue unwinder.  */

static void
arm_epilogue_frame_this_id (struct frame_info *this_frame,
			    void **this_cache,
			    struct frame_id *this_id)
{
  struct arm_prologue_cache *cache;
  CORE_ADDR pc, func;

  if (*this_cache == NULL)
    *this_cache = arm_make_epilogue_frame_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* Use function start address as part of the frame ID.  If we cannot
     identify the start address (due to missing symbol information),
     fall back to just using the current PC.  */
  pc = get_frame_pc (this_frame);
  func = get_frame_func (this_frame);
  if (func == 0)
    func = pc;

  (*this_id) = frame_id_build (cache->prev_sp, pc);
}
/* Implementation of function hook 'prev_register' in
   'struct frame_unwind' for epilogue unwinder.  */

static struct value *
arm_epilogue_frame_prev_register (struct frame_info *this_frame,
				  void **this_cache, int regnum)
{
  if (*this_cache == NULL)
    *this_cache = arm_make_epilogue_frame_cache (this_frame);

  return arm_prologue_prev_register (this_frame, this_cache, regnum);
}
static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
					  CORE_ADDR pc);
static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
					  CORE_ADDR pc);
/* Implementation of function hook 'sniffer' in
   'struct frame_unwind' for epilogue unwinder.  */

static int
arm_epilogue_frame_sniffer (const struct frame_unwind *self,
			    struct frame_info *this_frame,
			    void **this_prologue_cache)
{
  if (frame_relative_level (this_frame) == 0)
    {
      struct gdbarch *gdbarch = get_frame_arch (this_frame);
      CORE_ADDR pc = get_frame_pc (this_frame);

      if (arm_frame_is_thumb (this_frame))
	return thumb_stack_frame_destroyed_p (gdbarch, pc);
      else
	return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
    }
  else
    return 0;
}
/* Frame unwinder from epilogue.  */

static const struct frame_unwind arm_epilogue_frame_unwind =
{
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_epilogue_frame_this_id,
  arm_epilogue_frame_prev_register,
  NULL,
  arm_epilogue_frame_sniffer,
};
/* Recognize GCC's trampoline for thumb call-indirect.  If we are in a
   trampoline, return the target PC.  Otherwise return 0.

     void call0a (char c, short s, int i, long l) {}
     ...
     (*pointer_to_call0a) (c, s, i, l);

   Instead of calling a stub library function _call_via_xx (xx is
   the register name), GCC may inline the trampoline in the object
   file as below (register r2 has the address of call0a):

     .type main, %function
     ...
     bx r2

   The trampoline 'bx r2' doesn't belong to main.  */

static CORE_ADDR
arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
{
  /* The heuristics of recognizing such trampoline is that FRAME is
     executing in Thumb mode and the instruction on PC is 'bx Rm'.  */
  if (arm_frame_is_thumb (frame))
    {
      gdb_byte buf[2];

      if (target_read_memory (pc, buf, 2) == 0)
	{
	  struct gdbarch *gdbarch = get_frame_arch (frame);
	  enum bfd_endian byte_order_for_code
	    = gdbarch_byte_order_for_code (gdbarch);
	  unsigned short insn
	    = extract_unsigned_integer (buf, 2, byte_order_for_code);

	  if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
	    {
	      CORE_ADDR dest
		= get_frame_register_unsigned (frame, bits (insn, 3, 6));

	      /* Clear the LSB so that gdb core sets step-resume
		 breakpoint at the right address.  */
	      return UNMAKE_THUMB_ADDR (dest);
	    }
	}
    }

  return 0;
}
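/* Note on the encoding test above: in Thumb state "bx Rm" is encoded as
   0100 0111 0 Rm(4) 000, i.e. 0x4700 | (Rm << 3), so the 0xff80 mask pins
   the opcode bits and bits 6:3 hold the register number that
   bits (insn, 3, 6) extracts.  */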
static struct arm_prologue_cache *
arm_make_stub_cache (struct frame_info *this_frame)
{
  struct arm_prologue_cache *cache;

  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);

  return cache;
}

/* Our frame ID for a stub frame is the current SP and LR.  */

static void
arm_stub_this_id (struct frame_info *this_frame,
		  void **this_cache,
		  struct frame_id *this_id)
{
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_make_stub_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
}
*self
,
2893 struct frame_info
*this_frame
,
2894 void **this_prologue_cache
)
2896 CORE_ADDR addr_in_block
;
2898 CORE_ADDR pc
, start_addr
;
2901 addr_in_block
= get_frame_address_in_block (this_frame
);
2902 pc
= get_frame_pc (this_frame
);
2903 if (in_plt_section (addr_in_block
)
2904 /* We also use the stub winder if the target memory is unreadable
2905 to avoid having the prologue unwinder trying to read it. */
2906 || target_read_memory (pc
, dummy
, 4) != 0)
2909 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0
2910 && arm_skip_bx_reg (this_frame
, pc
) != 0)
struct frame_unwind arm_stub_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_stub_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_stub_unwind_sniffer
};
/* Put here the code to store, into CACHE->saved_regs, the addresses
   of the saved registers of frame described by THIS_FRAME.  CACHE is
   returned.  */

static struct arm_prologue_cache *
arm_m_exception_cache (struct frame_info *this_frame)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  struct arm_prologue_cache *cache;
  CORE_ADDR unwound_sp;
  LONGEST xpsr;

  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  unwound_sp = get_frame_register_unsigned (this_frame,
					    ARM_SP_REGNUM);

  /* The hardware saves eight 32-bit words, comprising xPSR,
     ReturnAddress, LR (R14), R12, R3, R2, R1, R0.  See details in
     "B1.5.6 Exception entry behavior" in
     "ARMv7-M Architecture Reference Manual".  */
  cache->saved_regs[0].addr = unwound_sp;
  cache->saved_regs[1].addr = unwound_sp + 4;
  cache->saved_regs[2].addr = unwound_sp + 8;
  cache->saved_regs[3].addr = unwound_sp + 12;
  cache->saved_regs[12].addr = unwound_sp + 16;
  cache->saved_regs[14].addr = unwound_sp + 20;
  cache->saved_regs[15].addr = unwound_sp + 24;
  cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;

  /* If bit 9 of the saved xPSR is set, then there is a four-byte
     aligner between the top of the 32-byte stack frame and the
     previous context's stack pointer.  */
  cache->prev_sp = unwound_sp + 32;
  if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
      && (xpsr & (1 << 9)) != 0)
    cache->prev_sp += 4;

  return cache;
}
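/* For example, with UNWOUND_SP of 0x20001000 the caller's SP is
   0x20001020 (eight saved words), or 0x20001024 if the hardware had to
   insert the four-byte aligner (saved xPSR bit 9 set).  */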
/* Implementation of function hook 'this_id' in
   'struct frame_unwind'.  */

static void
arm_m_exception_this_id (struct frame_info *this_frame,
			 void **this_cache,
			 struct frame_id *this_id)
{
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_m_exception_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* Our frame ID for a stub frame is the current SP and LR.  */
  *this_id = frame_id_build (cache->prev_sp,
			     get_frame_pc (this_frame));
}
/* Implementation of function hook 'prev_register' in
   'struct frame_unwind'.  */

static struct value *
arm_m_exception_prev_register (struct frame_info *this_frame,
			       void **this_cache,
			       int prev_regnum)
{
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_m_exception_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* The value was already reconstructed into PREV_SP.  */
  if (prev_regnum == ARM_SP_REGNUM)
    return frame_unwind_got_constant (this_frame, prev_regnum,
				      cache->prev_sp);

  return trad_frame_get_prev_register (this_frame, cache->saved_regs,
				       prev_regnum);
}
/* Implementation of function hook 'sniffer' in
   'struct frame_unwind'.  */

static int
arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
				struct frame_info *this_frame,
				void **this_prologue_cache)
{
  CORE_ADDR this_pc = get_frame_pc (this_frame);

  /* No need to check is_m; this sniffer is only registered for
     M-profile architectures.  */

  /* Check if exception frame returns to a magic PC value.  */
  return arm_m_addr_is_magic (this_pc);
}
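/* Background: on M-profile, returning from an exception is done by loading
   a magic EXC_RETURN value (e.g. 0xfffffff1, 0xfffffff9 or 0xfffffffd)
   into the PC, which is what arm_m_addr_is_magic looks for; such a
   "return address" means the frame above must be unwound using the
   exception-entry stack layout handled by this unwinder.  */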
/* Frame unwinder for M-profile exceptions.  */

struct frame_unwind arm_m_exception_unwind =
{
  SIGTRAMP_FRAME,
  default_frame_unwind_stop_reason,
  arm_m_exception_this_id,
  arm_m_exception_prev_register,
  NULL,
  arm_m_exception_unwind_sniffer
};
static CORE_ADDR
arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
{
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  return cache->prev_sp - cache->framesize;
}

struct frame_base arm_normal_base = {
  &arm_prologue_unwind,
  arm_normal_frame_base,
  arm_normal_frame_base,
  arm_normal_frame_base
};
/* Assuming THIS_FRAME is a dummy, return the frame ID of that
   dummy frame.  The frame ID's base needs to match the TOS value
   saved by save_dummy_frame_tos() and returned from
   arm_push_dummy_call, and the PC needs to match the dummy frame's
   breakpoint.  */

static struct frame_id
arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
{
  return frame_id_build (get_frame_register_unsigned (this_frame,
						      ARM_SP_REGNUM),
			 get_frame_pc (this_frame));
}
/* Given THIS_FRAME, find the previous frame's resume PC (which will
   be used to construct the previous frame's ID, after looking up the
   containing function).  */

static CORE_ADDR
arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
{
  CORE_ADDR pc;

  pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
  return arm_addr_bits_remove (gdbarch, pc);
}

static CORE_ADDR
arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
{
  return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
}
static struct value *
arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
			  int regnum)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  CORE_ADDR lr, cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

  switch (regnum)
    {
    case ARM_PC_REGNUM:
      /* The PC is normally copied from the return column, which
	 describes saves of LR.  However, that version may have an
	 extra bit set to indicate Thumb state.  The bit is not
	 part of the PC.  */
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, regnum,
					arm_addr_bits_remove (gdbarch, lr));

    case ARM_PS_REGNUM:
      /* Reconstruct the T bit; see arm_prologue_prev_register for details.  */
      cpsr = get_frame_register_unsigned (this_frame, regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
	cpsr |= t_bit;
      else
	cpsr &= ~t_bit;
      return frame_unwind_got_constant (this_frame, regnum, cpsr);

    default:
      internal_error (__FILE__, __LINE__,
		      _("Unexpected register %d"), regnum);
    }
}
static void
arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
			   struct dwarf2_frame_state_reg *reg,
			   struct frame_info *this_frame)
{
  switch (regnum)
    {
    case ARM_PC_REGNUM:
    case ARM_PS_REGNUM:
      reg->how = DWARF2_FRAME_REG_FN;
      reg->loc.fn = arm_dwarf2_prev_register;
      break;
    case ARM_SP_REGNUM:
      reg->how = DWARF2_FRAME_REG_CFA;
      break;
    }
}
3143 /* Implement the stack_frame_destroyed_p gdbarch method. */
3146 thumb_stack_frame_destroyed_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3148 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3149 unsigned int insn
, insn2
;
3150 int found_return
= 0, found_stack_adjust
= 0;
3151 CORE_ADDR func_start
, func_end
;
3155 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3158 /* The epilogue is a sequence of instructions along the following lines:
3160 - add stack frame size to SP or FP
3161 - [if frame pointer used] restore SP from FP
3162 - restore registers from SP [may include PC]
3163 - a return-type instruction [if PC wasn't already restored]
3165 In a first pass, we scan forward from the current PC and verify the
3166 instructions we find as compatible with this sequence, ending in a
3169 However, this is not sufficient to distinguish indirect function calls
3170 within a function from indirect tail calls in the epilogue in some cases.
3171 Therefore, if we didn't already find any SP-changing instruction during
3172 forward scan, we add a backward scanning heuristic to ensure we actually
3173 are in the epilogue. */
3176 while (scan_pc
< func_end
&& !found_return
)
3178 if (target_read_memory (scan_pc
, buf
, 2))
3182 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3184 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
3186 else if (insn
== 0x46f7) /* mov pc, lr */
3188 else if (thumb_instruction_restores_sp (insn
))
3190 if ((insn
& 0xff00) == 0xbd00) /* pop <registers, PC> */
3193 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instruction */
3195 if (target_read_memory (scan_pc
, buf
, 2))
3199 insn2
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3201 if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3203 if (insn2
& 0x8000) /* <registers> include PC. */
3206 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3207 && (insn2
& 0x0fff) == 0x0b04)
3209 if ((insn2
& 0xf000) == 0xf000) /* <Rt> is PC. */
3212 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3213 && (insn2
& 0x0e00) == 0x0a00)
3225 /* Since any instruction in the epilogue sequence, with the possible
3226 exception of return itself, updates the stack pointer, we need to
3227 scan backwards for at most one instruction. Try either a 16-bit or
3228 a 32-bit instruction. This is just a heuristic, so we do not worry
3229 too much about false positives. */
3231 if (pc
- 4 < func_start
)
3233 if (target_read_memory (pc
- 4, buf
, 4))
3236 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3237 insn2
= extract_unsigned_integer (buf
+ 2, 2, byte_order_for_code
);
3239 if (thumb_instruction_restores_sp (insn2
))
3240 found_stack_adjust
= 1;
3241 else if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3242 found_stack_adjust
= 1;
3243 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3244 && (insn2
& 0x0fff) == 0x0b04)
3245 found_stack_adjust
= 1;
3246 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3247 && (insn2
& 0x0e00) == 0x0a00)
3248 found_stack_adjust
= 1;
3250 return found_stack_adjust
;
3254 arm_stack_frame_destroyed_p_1 (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3256 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3259 CORE_ADDR func_start
, func_end
;
3261 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3264 /* We are in the epilogue if the previous instruction was a stack
3265 adjustment and the next instruction is a possible return (bx, mov
3266 pc, or pop). We could have to scan backwards to find the stack
3267 adjustment, or forwards to find the return, but this is a decent
3268 approximation. First scan forwards. */
3271 insn
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
3272 if (bits (insn
, 28, 31) != INST_NV
)
3274 if ((insn
& 0x0ffffff0) == 0x012fff10)
3277 else if ((insn
& 0x0ffffff0) == 0x01a0f000)
3280 else if ((insn
& 0x0fff0000) == 0x08bd0000
3281 && (insn
& 0x0000c000) != 0)
3282 /* POP (LDMIA), including PC or LR. */
3289 /* Scan backwards. This is just a heuristic, so do not worry about
3290 false positives from mode changes. */
3292 if (pc
< func_start
+ 4)
3295 insn
= read_memory_unsigned_integer (pc
- 4, 4, byte_order_for_code
);
3296 if (arm_instruction_restores_sp (insn
))
/* Implement the stack_frame_destroyed_p gdbarch method.  */

static int
arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_stack_frame_destroyed_p (gdbarch, pc);
  else
    return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
}
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.  */

struct stack_item
{
  int len;
  struct stack_item *prev;
  gdb_byte *data;
};

static struct stack_item *
push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
{
  struct stack_item *si;

  si = XNEW (struct stack_item);
  si->data = (gdb_byte *) xmalloc (len);
  si->len = len;
  si->prev = prev;
  memcpy (si->data, contents, len);
  return si;
}

static struct stack_item *
pop_stack_item (struct stack_item *si)
{
  struct stack_item *dead = si;

  si = si->prev;
  xfree (dead->data);
  xfree (dead);
  return si;
}
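/* Note that push_stack_item copies CONTENTS into freshly allocated
   storage, so callers may reuse their buffer immediately, while
   pop_stack_item frees both the node and its data and hands back the
   next older item.  */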
/* Return the alignment (in bytes) of the given type.  */

static int
arm_type_align (struct type *t)
{
  int n;
  int align;
  int falign;

  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    default:
      /* Should never happen.  */
      internal_error (__FILE__, __LINE__, _("unknown type alignment"));
      return 4;

    case TYPE_CODE_PTR:
    case TYPE_CODE_ENUM:
    case TYPE_CODE_INT:
    case TYPE_CODE_FLT:
    case TYPE_CODE_SET:
    case TYPE_CODE_RANGE:
    case TYPE_CODE_REF:
    case TYPE_CODE_RVALUE_REF:
    case TYPE_CODE_CHAR:
    case TYPE_CODE_BOOL:
      return TYPE_LENGTH (t);

    case TYPE_CODE_ARRAY:
      if (TYPE_VECTOR (t))
	{
	  /* Use the natural alignment for vector types (the same for
	     scalar type), but the maximum alignment is 64-bit.  */
	  if (TYPE_LENGTH (t) > 8)
	    return 8;
	  else
	    return TYPE_LENGTH (t);
	}
      else
	return arm_type_align (TYPE_TARGET_TYPE (t));
    case TYPE_CODE_COMPLEX:
      return arm_type_align (TYPE_TARGET_TYPE (t));

    case TYPE_CODE_STRUCT:
    case TYPE_CODE_UNION:
      align = 1;
      for (n = 0; n < TYPE_NFIELDS (t); n++)
	{
	  falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
	  if (falign > align)
	    align = falign;
	}
      return align;
    }
}
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,
  VFP_CPRC_SINGLE,
  VFP_CPRC_DOUBLE,
  VFP_CPRC_VEC64,
  VFP_CPRC_VEC128
};

/* The length of one element of base type B.  */

static unsigned
arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
{
  switch (b)
    {
    case VFP_CPRC_SINGLE:
      return 4;
    case VFP_CPRC_DOUBLE:
      return 8;
    case VFP_CPRC_VEC64:
      return 8;
    case VFP_CPRC_VEC128:
      return 16;
    default:
      internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
		      (int) b);
    }
}

/* The character ('s', 'd' or 'q') for the type of VFP register used
   for passing base type B.  */

static int
arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
{
  switch (b)
    {
    case VFP_CPRC_SINGLE:
      return 's';
    case VFP_CPRC_DOUBLE:
      return 'd';
    case VFP_CPRC_VEC64:
      return 'd';
    case VFP_CPRC_VEC128:
      return 'q';
    default:
      internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
		      (int) b);
    }
}
/* Determine whether T may be part of a candidate for passing and
   returning in VFP registers, ignoring the limit on the total number
   of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
   classification of the first valid component found; if it is not
   VFP_CPRC_UNKNOWN, all components must have the same classification
   as *BASE_TYPE.  If it is found that T contains a type not permitted
   for passing and returning in VFP registers, a type differently
   classified from *BASE_TYPE, or two types differently classified
   from each other, return -1, otherwise return the total number of
   base-type elements found (possibly 0 in an empty structure or
   array).  Vector types are not currently supported, matching the
   generic AAPCS support.  */
3472 arm_vfp_cprc_sub_candidate (struct type
*t
,
3473 enum arm_vfp_cprc_base_type
*base_type
)
3475 t
= check_typedef (t
);
3476 switch (TYPE_CODE (t
))
3479 switch (TYPE_LENGTH (t
))
3482 if (*base_type
== VFP_CPRC_UNKNOWN
)
3483 *base_type
= VFP_CPRC_SINGLE
;
3484 else if (*base_type
!= VFP_CPRC_SINGLE
)
3489 if (*base_type
== VFP_CPRC_UNKNOWN
)
3490 *base_type
= VFP_CPRC_DOUBLE
;
3491 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3500 case TYPE_CODE_COMPLEX
:
3501 /* Arguments of complex T where T is one of the types float or
3502 double get treated as if they are implemented as:
3511 switch (TYPE_LENGTH (t
))
3514 if (*base_type
== VFP_CPRC_UNKNOWN
)
3515 *base_type
= VFP_CPRC_SINGLE
;
3516 else if (*base_type
!= VFP_CPRC_SINGLE
)
3521 if (*base_type
== VFP_CPRC_UNKNOWN
)
3522 *base_type
= VFP_CPRC_DOUBLE
;
3523 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3532 case TYPE_CODE_ARRAY
:
3534 if (TYPE_VECTOR (t
))
3536 /* A 64-bit or 128-bit containerized vector type are VFP
3538 switch (TYPE_LENGTH (t
))
3541 if (*base_type
== VFP_CPRC_UNKNOWN
)
3542 *base_type
= VFP_CPRC_VEC64
;
3545 if (*base_type
== VFP_CPRC_UNKNOWN
)
3546 *base_type
= VFP_CPRC_VEC128
;
3557 count
= arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t
),
3561 if (TYPE_LENGTH (t
) == 0)
3563 gdb_assert (count
== 0);
3566 else if (count
== 0)
3568 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3569 gdb_assert ((TYPE_LENGTH (t
) % unitlen
) == 0);
3570 return TYPE_LENGTH (t
) / unitlen
;
3575 case TYPE_CODE_STRUCT
:
3580 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3584 if (!field_is_static (&TYPE_FIELD (t
, i
)))
3585 sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3587 if (sub_count
== -1)
3591 if (TYPE_LENGTH (t
) == 0)
3593 gdb_assert (count
== 0);
3596 else if (count
== 0)
3598 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3599 if (TYPE_LENGTH (t
) != unitlen
* count
)
3604 case TYPE_CODE_UNION
:
3609 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3611 int sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3613 if (sub_count
== -1)
3615 count
= (count
> sub_count
? count
: sub_count
);
3617 if (TYPE_LENGTH (t
) == 0)
3619 gdb_assert (count
== 0);
3622 else if (count
== 0)
3624 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3625 if (TYPE_LENGTH (t
) != unitlen
* count
)
/* Determine whether T is a VFP co-processor register candidate (CPRC)
   if passed to or returned from a non-variadic function with the VFP
   ABI in effect.  Return 1 if it is, 0 otherwise.  If it is, set
   *BASE_TYPE to the base type for T and *COUNT to the number of
   elements of that base type before returning.  */

static int
arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
			int *count)
{
  enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
  int c = arm_vfp_cprc_sub_candidate (t, &b);

  if (c <= 0 || c > 4)
    return 0;

  *base_type = b;
  *count = c;
  return 1;
}
/* Return 1 if the VFP ABI should be used for passing arguments to and
   returning values from a function of type FUNC_TYPE, 0
   otherwise.  */

static int
arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  /* Variadic functions always use the base ABI.  Assume that functions
     without debug info are not variadic.  */
  if (func_type && TYPE_VARARGS (check_typedef (func_type)))
    return 0;
  /* The VFP ABI is only supported as a variant of AAPCS.  */
  if (tdep->arm_abi != ARM_ABI_AAPCS)
    return 0;
  return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
}
3674 /* We currently only support passing parameters in integer registers, which
3675 conforms with GCC's default model, and VFP argument passing following
3676 the VFP variant of AAPCS. Several other variants exist and
3677 we should probably support some of them based on the selected ABI. */
3680 arm_push_dummy_call (struct gdbarch
*gdbarch
, struct value
*function
,
3681 struct regcache
*regcache
, CORE_ADDR bp_addr
, int nargs
,
3682 struct value
**args
, CORE_ADDR sp
, int struct_return
,
3683 CORE_ADDR struct_addr
)
3685 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
3689 struct stack_item
*si
= NULL
;
3692 unsigned vfp_regs_free
= (1 << 16) - 1;
3694 /* Determine the type of this function and whether the VFP ABI
3696 ftype
= check_typedef (value_type (function
));
3697 if (TYPE_CODE (ftype
) == TYPE_CODE_PTR
)
3698 ftype
= check_typedef (TYPE_TARGET_TYPE (ftype
));
3699 use_vfp_abi
= arm_vfp_abi_for_function (gdbarch
, ftype
);
3701 /* Set the return address. For the ARM, the return breakpoint is
3702 always at BP_ADDR. */
3703 if (arm_pc_is_thumb (gdbarch
, bp_addr
))
3705 regcache_cooked_write_unsigned (regcache
, ARM_LR_REGNUM
, bp_addr
);
3707 /* Walk through the list of args and determine how large a temporary
3708 stack is required. Need to take care here as structs may be
3709 passed on the stack, and we have to push them. */
3712 argreg
= ARM_A1_REGNUM
;
3715 /* The struct_return pointer occupies the first parameter
3716 passing register. */
3720 fprintf_unfiltered (gdb_stdlog
, "struct return in %s = %s\n",
3721 gdbarch_register_name (gdbarch
, argreg
),
3722 paddress (gdbarch
, struct_addr
));
3723 regcache_cooked_write_unsigned (regcache
, argreg
, struct_addr
);
3727 for (argnum
= 0; argnum
< nargs
; argnum
++)
3730 struct type
*arg_type
;
3731 struct type
*target_type
;
3732 enum type_code typecode
;
3733 const bfd_byte
*val
;
3735 enum arm_vfp_cprc_base_type vfp_base_type
;
3737 int may_use_core_reg
= 1;
3739 arg_type
= check_typedef (value_type (args
[argnum
]));
3740 len
= TYPE_LENGTH (arg_type
);
3741 target_type
= TYPE_TARGET_TYPE (arg_type
);
3742 typecode
= TYPE_CODE (arg_type
);
3743 val
= value_contents (args
[argnum
]);
3745 align
= arm_type_align (arg_type
);
3746 /* Round alignment up to a whole number of words. */
3747 align
= (align
+ INT_REGISTER_SIZE
- 1) & ~(INT_REGISTER_SIZE
- 1);
3748 /* Different ABIs have different maximum alignments. */
3749 if (gdbarch_tdep (gdbarch
)->arm_abi
== ARM_ABI_APCS
)
3751 /* The APCS ABI only requires word alignment. */
3752 align
= INT_REGISTER_SIZE
;
3756 /* The AAPCS requires at most doubleword alignment. */
3757 if (align
> INT_REGISTER_SIZE
* 2)
3758 align
= INT_REGISTER_SIZE
* 2;
3762 && arm_vfp_call_candidate (arg_type
, &vfp_base_type
,
3770 /* Because this is a CPRC it cannot go in a core register or
3771 cause a core register to be skipped for alignment.
3772 Either it goes in VFP registers and the rest of this loop
3773 iteration is skipped for this argument, or it goes on the
3774 stack (and the stack alignment code is correct for this
3776 may_use_core_reg
= 0;
3778 unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
3779 shift
= unit_length
/ 4;
3780 mask
= (1 << (shift
* vfp_base_count
)) - 1;
3781 for (regno
= 0; regno
< 16; regno
+= shift
)
3782 if (((vfp_regs_free
>> regno
) & mask
) == mask
)
3791 vfp_regs_free
&= ~(mask
<< regno
);
3792 reg_scaled
= regno
/ shift
;
3793 reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
3794 for (i
= 0; i
< vfp_base_count
; i
++)
3798 if (reg_char
== 'q')
3799 arm_neon_quad_write (gdbarch
, regcache
, reg_scaled
+ i
,
3800 val
+ i
* unit_length
);
3803 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d",
3804 reg_char
, reg_scaled
+ i
);
3805 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
3807 regcache
->cooked_write (regnum
, val
+ i
* unit_length
);
3814 /* This CPRC could not go in VFP registers, so all VFP
3815 registers are now marked as used. */
      /* Push stack padding for doubleword alignment.  */
3821 if (nstack
& (align
- 1))
3823 si
= push_stack_item (si
, val
, INT_REGISTER_SIZE
);
3824 nstack
+= INT_REGISTER_SIZE
;
3827 /* Doubleword aligned quantities must go in even register pairs. */
3828 if (may_use_core_reg
3829 && argreg
<= ARM_LAST_ARG_REGNUM
3830 && align
> INT_REGISTER_SIZE
3834 /* If the argument is a pointer to a function, and it is a
3835 Thumb function, create a LOCAL copy of the value and set
3836 the THUMB bit in it. */
3837 if (TYPE_CODE_PTR
== typecode
3838 && target_type
!= NULL
3839 && TYPE_CODE_FUNC
== TYPE_CODE (check_typedef (target_type
)))
3841 CORE_ADDR regval
= extract_unsigned_integer (val
, len
, byte_order
);
3842 if (arm_pc_is_thumb (gdbarch
, regval
))
3844 bfd_byte
*copy
= (bfd_byte
*) alloca (len
);
3845 store_unsigned_integer (copy
, len
, byte_order
,
3846 MAKE_THUMB_ADDR (regval
));
3851 /* Copy the argument to general registers or the stack in
3852 register-sized pieces. Large arguments are split between
3853 registers and stack. */
3856 int partial_len
= len
< INT_REGISTER_SIZE
? len
: INT_REGISTER_SIZE
;
3858 = extract_unsigned_integer (val
, partial_len
, byte_order
);
3860 if (may_use_core_reg
&& argreg
<= ARM_LAST_ARG_REGNUM
)
3862 /* The argument is being passed in a general purpose
3864 if (byte_order
== BFD_ENDIAN_BIG
)
3865 regval
<<= (INT_REGISTER_SIZE
- partial_len
) * 8;
3867 fprintf_unfiltered (gdb_stdlog
, "arg %d in %s = 0x%s\n",
3869 gdbarch_register_name
3871 phex (regval
, INT_REGISTER_SIZE
));
3872 regcache_cooked_write_unsigned (regcache
, argreg
, regval
);
3877 gdb_byte buf
[INT_REGISTER_SIZE
];
3879 memset (buf
, 0, sizeof (buf
));
3880 store_unsigned_integer (buf
, partial_len
, byte_order
, regval
);
3882 /* Push the arguments onto the stack. */
3884 fprintf_unfiltered (gdb_stdlog
, "arg %d @ sp + %d\n",
3886 si
= push_stack_item (si
, buf
, INT_REGISTER_SIZE
);
3887 nstack
+= INT_REGISTER_SIZE
;
3894 /* If we have an odd number of words to push, then decrement the stack
3895 by one word now, so first stack argument will be dword aligned. */
3902 write_memory (sp
, si
->data
, si
->len
);
3903 si
= pop_stack_item (si
);
  /* Finally, update the SP register.  */
3907 regcache_cooked_write_unsigned (regcache
, ARM_SP_REGNUM
, sp
);
/* Always align the frame to an 8-byte boundary.  This is required on
   some platforms and harmless on the rest.  */

static CORE_ADDR
arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
{
  /* Align the stack to eight bytes.  */
  return sp & ~ (CORE_ADDR) 7;
}
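/* For example, a candidate SP of 0x0040fff5 is rounded down to
   0x0040fff0 here.  */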
static void
print_fpu_flags (struct ui_file *file, int flags)
{
  if (flags & (1 << 0))
    fputs_filtered ("IVO ", file);
  if (flags & (1 << 1))
    fputs_filtered ("DVZ ", file);
  if (flags & (1 << 2))
    fputs_filtered ("OFL ", file);
  if (flags & (1 << 3))
    fputs_filtered ("UFL ", file);
  if (flags & (1 << 4))
    fputs_filtered ("INX ", file);
  fputc_filtered ('\n', file);
}
/* Print interesting information about the floating point processor
   (if present) or emulator.  */

static void
arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
		      struct frame_info *frame, const char *args)
{
  unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
  int type;

  type = (status >> 24) & 127;
  if (status & (1 << 31))
    fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
  else
    fprintf_filtered (file, _("Software FPU type %d\n"), type);
  /* i18n: [floating point unit] mask */
  fputs_filtered (_("mask: "), file);
  print_fpu_flags (file, status >> 16);
  /* i18n: [floating point unit] flags */
  fputs_filtered (_("flags: "), file);
  print_fpu_flags (file, status);
}
/* Construct the ARM extended floating point type.  */

static struct type *
arm_ext_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (!tdep->arm_ext_type)
    tdep->arm_ext_type
      = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
			 floatformats_arm_ext);

  return tdep->arm_ext_type;
}
3975 static struct type
*
3976 arm_neon_double_type (struct gdbarch
*gdbarch
)
3978 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3980 if (tdep
->neon_double_type
== NULL
)
3982 struct type
*t
, *elem
;
3984 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_d",
3986 elem
= builtin_type (gdbarch
)->builtin_uint8
;
3987 append_composite_type_field (t
, "u8", init_vector_type (elem
, 8));
3988 elem
= builtin_type (gdbarch
)->builtin_uint16
;
3989 append_composite_type_field (t
, "u16", init_vector_type (elem
, 4));
3990 elem
= builtin_type (gdbarch
)->builtin_uint32
;
3991 append_composite_type_field (t
, "u32", init_vector_type (elem
, 2));
3992 elem
= builtin_type (gdbarch
)->builtin_uint64
;
3993 append_composite_type_field (t
, "u64", elem
);
3994 elem
= builtin_type (gdbarch
)->builtin_float
;
3995 append_composite_type_field (t
, "f32", init_vector_type (elem
, 2));
3996 elem
= builtin_type (gdbarch
)->builtin_double
;
3997 append_composite_type_field (t
, "f64", elem
);
3999 TYPE_VECTOR (t
) = 1;
4000 TYPE_NAME (t
) = "neon_d";
4001 tdep
->neon_double_type
= t
;
4004 return tdep
->neon_double_type
;
/* FIXME: The vector types are not correctly ordered on big-endian
   targets.  Just as s0 is the low bits of d0, d0[0] is also the low
   bits of d0 - regardless of what unit size is being held in d0.  So
   the offset of the first uint8 in d0 is 7, but the offset of the
   first float is 4.  This code works as-is for little-endian
   targets.  */
4014 static struct type
*
4015 arm_neon_quad_type (struct gdbarch
*gdbarch
)
4017 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4019 if (tdep
->neon_quad_type
== NULL
)
4021 struct type
*t
, *elem
;
4023 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_q",
4025 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4026 append_composite_type_field (t
, "u8", init_vector_type (elem
, 16));
4027 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4028 append_composite_type_field (t
, "u16", init_vector_type (elem
, 8));
4029 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4030 append_composite_type_field (t
, "u32", init_vector_type (elem
, 4));
4031 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4032 append_composite_type_field (t
, "u64", init_vector_type (elem
, 2));
4033 elem
= builtin_type (gdbarch
)->builtin_float
;
4034 append_composite_type_field (t
, "f32", init_vector_type (elem
, 4));
4035 elem
= builtin_type (gdbarch
)->builtin_double
;
4036 append_composite_type_field (t
, "f64", init_vector_type (elem
, 2));
4038 TYPE_VECTOR (t
) = 1;
4039 TYPE_NAME (t
) = "neon_q";
4040 tdep
->neon_quad_type
= t
;
4043 return tdep
->neon_quad_type
;
4046 /* Return the GDB type object for the "standard" data type of data in
4049 static struct type
*
4050 arm_register_type (struct gdbarch
*gdbarch
, int regnum
)
4052 int num_regs
= gdbarch_num_regs (gdbarch
);
4054 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
4055 && regnum
>= num_regs
&& regnum
< num_regs
+ 32)
4056 return builtin_type (gdbarch
)->builtin_float
;
4058 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
4059 && regnum
>= num_regs
+ 32 && regnum
< num_regs
+ 32 + 16)
4060 return arm_neon_quad_type (gdbarch
);
4062 /* If the target description has register information, we are only
4063 in this function so that we can override the types of
4064 double-precision registers for NEON. */
4065 if (tdesc_has_registers (gdbarch_target_desc (gdbarch
)))
4067 struct type
*t
= tdesc_register_type (gdbarch
, regnum
);
4069 if (regnum
>= ARM_D0_REGNUM
&& regnum
< ARM_D0_REGNUM
+ 32
4070 && TYPE_CODE (t
) == TYPE_CODE_FLT
4071 && gdbarch_tdep (gdbarch
)->have_neon
)
4072 return arm_neon_double_type (gdbarch
);
4077 if (regnum
>= ARM_F0_REGNUM
&& regnum
< ARM_F0_REGNUM
+ NUM_FREGS
)
4079 if (!gdbarch_tdep (gdbarch
)->have_fpa_registers
)
4080 return builtin_type (gdbarch
)->builtin_void
;
4082 return arm_ext_type (gdbarch
);
4084 else if (regnum
== ARM_SP_REGNUM
)
4085 return builtin_type (gdbarch
)->builtin_data_ptr
;
4086 else if (regnum
== ARM_PC_REGNUM
)
4087 return builtin_type (gdbarch
)->builtin_func_ptr
;
4088 else if (regnum
>= ARRAY_SIZE (arm_register_names
))
4089 /* These registers are only supported on targets which supply
4090 an XML description. */
4091 return builtin_type (gdbarch
)->builtin_int0
;
4093 return builtin_type (gdbarch
)->builtin_uint32
;
4096 /* Map a DWARF register REGNUM onto the appropriate GDB register
4100 arm_dwarf_reg_to_regnum (struct gdbarch
*gdbarch
, int reg
)
4102 /* Core integer regs. */
4103 if (reg
>= 0 && reg
<= 15)
4106 /* Legacy FPA encoding. These were once used in a way which
4107 overlapped with VFP register numbering, so their use is
4108 discouraged, but GDB doesn't support the ARM toolchain
4109 which used them for VFP. */
4110 if (reg
>= 16 && reg
<= 23)
4111 return ARM_F0_REGNUM
+ reg
- 16;
4113 /* New assignments for the FPA registers. */
4114 if (reg
>= 96 && reg
<= 103)
4115 return ARM_F0_REGNUM
+ reg
- 96;
4117 /* WMMX register assignments. */
4118 if (reg
>= 104 && reg
<= 111)
4119 return ARM_WCGR0_REGNUM
+ reg
- 104;
4121 if (reg
>= 112 && reg
<= 127)
4122 return ARM_WR0_REGNUM
+ reg
- 112;
4124 if (reg
>= 192 && reg
<= 199)
4125 return ARM_WC0_REGNUM
+ reg
- 192;
4127 /* VFP v2 registers. A double precision value is actually
4128 in d1 rather than s2, but the ABI only defines numbering
4129 for the single precision registers. This will "just work"
4130 in GDB for little endian targets (we'll read eight bytes,
4131 starting in s0 and then progressing to s1), but will be
4132 reversed on big endian targets with VFP. This won't
4133 be a problem for the new Neon quad registers; you're supposed
4134 to use DW_OP_piece for those. */
4135 if (reg
>= 64 && reg
<= 95)
4139 xsnprintf (name_buf
, sizeof (name_buf
), "s%d", reg
- 64);
4140 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4144 /* VFP v3 / Neon registers. This range is also used for VFP v2
4145 registers, except that it now describes d0 instead of s0. */
4146 if (reg
>= 256 && reg
<= 287)
4150 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", reg
- 256);
4151 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4158 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4160 arm_register_sim_regno (struct gdbarch
*gdbarch
, int regnum
)
4163 gdb_assert (reg
>= 0 && reg
< gdbarch_num_regs (gdbarch
));
4165 if (regnum
>= ARM_WR0_REGNUM
&& regnum
<= ARM_WR15_REGNUM
)
4166 return regnum
- ARM_WR0_REGNUM
+ SIM_ARM_IWMMXT_COP0R0_REGNUM
;
4168 if (regnum
>= ARM_WC0_REGNUM
&& regnum
<= ARM_WC7_REGNUM
)
4169 return regnum
- ARM_WC0_REGNUM
+ SIM_ARM_IWMMXT_COP1R0_REGNUM
;
4171 if (regnum
>= ARM_WCGR0_REGNUM
&& regnum
<= ARM_WCGR7_REGNUM
)
4172 return regnum
- ARM_WCGR0_REGNUM
+ SIM_ARM_IWMMXT_COP1R8_REGNUM
;
4174 if (reg
< NUM_GREGS
)
4175 return SIM_ARM_R0_REGNUM
+ reg
;
4178 if (reg
< NUM_FREGS
)
4179 return SIM_ARM_FP0_REGNUM
+ reg
;
4182 if (reg
< NUM_SREGS
)
4183 return SIM_ARM_FPS_REGNUM
+ reg
;
4186 internal_error (__FILE__
, __LINE__
, _("Bad REGNUM %d"), regnum
);
/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
   the buffer to be NEW_LEN bytes ending at ENDADDR.  Return
   NULL if an error occurs.  BUF is freed.  */

static gdb_byte *
extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
		       int old_len, int new_len)
{
  gdb_byte *new_buf;
  int bytes_to_read = new_len - old_len;

  new_buf = (gdb_byte *) xmalloc (new_len);
  memcpy (new_buf + bytes_to_read, buf, old_len);
  xfree (buf);
  if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
    {
      xfree (new_buf);
      return NULL;
    }

  return new_buf;
}
/* An IT block is at most the 2-byte IT instruction followed by
   four 4-byte instructions.  The furthest back we must search to
   find an IT block that affects the current instruction is thus
   2 + 3 * 4 == 14 bytes.  */
#define MAX_IT_BLOCK_PREFIX 14

/* Use a quick scan if there are more than this many bytes of
   code.  */
#define IT_SCAN_THRESHOLD 32
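/* The MAX_IT_BLOCK_PREFIX figure comes from the worst case of the 2-byte
   IT instruction followed by three 4-byte instructions, with the
   breakpoint falling on the fourth and last instruction of the block:
   2 + 3 * 4 == 14 bytes.  */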
/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
   A breakpoint in an IT block may not be hit, depending on the
   condition flags.  */
4225 arm_adjust_breakpoint_address (struct gdbarch
*gdbarch
, CORE_ADDR bpaddr
)
4229 CORE_ADDR boundary
, func_start
;
4231 enum bfd_endian order
= gdbarch_byte_order_for_code (gdbarch
);
4232 int i
, any
, last_it
, last_it_count
;
4234 /* If we are using BKPT breakpoints, none of this is necessary. */
4235 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
== NULL
)
4238 /* ARM mode does not have this problem. */
4239 if (!arm_pc_is_thumb (gdbarch
, bpaddr
))
4242 /* We are setting a breakpoint in Thumb code that could potentially
4243 contain an IT block. The first step is to find how much Thumb
4244 code there is; we do not need to read outside of known Thumb
4246 map_type
= arm_find_mapping_symbol (bpaddr
, &boundary
);
4248 /* Thumb-2 code must have mapping symbols to have a chance. */
4251 bpaddr
= gdbarch_addr_bits_remove (gdbarch
, bpaddr
);
4253 if (find_pc_partial_function (bpaddr
, NULL
, &func_start
, NULL
)
4254 && func_start
> boundary
)
4255 boundary
= func_start
;
4257 /* Search for a candidate IT instruction. We have to do some fancy
4258 footwork to distinguish a real IT instruction from the second
4259 half of a 32-bit instruction, but there is no need for that if
4260 there's no candidate. */
4261 buf_len
= std::min (bpaddr
- boundary
, (CORE_ADDR
) MAX_IT_BLOCK_PREFIX
);
4263 /* No room for an IT instruction. */
4266 buf
= (gdb_byte
*) xmalloc (buf_len
);
4267 if (target_read_code (bpaddr
- buf_len
, buf
, buf_len
) != 0)
4270 for (i
= 0; i
< buf_len
; i
+= 2)
4272 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4273 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4286 /* OK, the code bytes before this instruction contain at least one
4287 halfword which resembles an IT instruction. We know that it's
4288 Thumb code, but there are still two possibilities. Either the
4289 halfword really is an IT instruction, or it is the second half of
4290 a 32-bit Thumb instruction. The only way we can tell is to
4291 scan forwards from a known instruction boundary. */
4292 if (bpaddr
- boundary
> IT_SCAN_THRESHOLD
)
4296 /* There's a lot of code before this instruction. Start with an
4297 optimistic search; it's easy to recognize halfwords that can
4298 not be the start of a 32-bit instruction, and use that to
4299 lock on to the instruction boundaries. */
4300 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, IT_SCAN_THRESHOLD
);
4303 buf_len
= IT_SCAN_THRESHOLD
;
4306 for (i
= 0; i
< buf_len
- sizeof (buf
) && ! definite
; i
+= 2)
4308 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4309 if (thumb_insn_size (inst1
) == 2)
4316 /* At this point, if DEFINITE, BUF[I] is the first place we
4317 are sure that we know the instruction boundaries, and it is far
4318 enough from BPADDR that we could not miss an IT instruction
4319 affecting BPADDR. If ! DEFINITE, give up - start from a
4323 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
,
4327 buf_len
= bpaddr
- boundary
;
4333 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, bpaddr
- boundary
);
4336 buf_len
= bpaddr
- boundary
;
4340 /* Scan forwards. Find the last IT instruction before BPADDR. */
4345 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4347 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4352 else if (inst1
& 0x0002)
4354 else if (inst1
& 0x0004)
4359 i
+= thumb_insn_size (inst1
);
4365 /* There wasn't really an IT instruction after all. */
4368 if (last_it_count
< 1)
4369 /* It was too far away. */
4372 /* This really is a trouble spot. Move the breakpoint to the IT
4374 return bpaddr
- buf_len
+ last_it
;
/* ARM displaced stepping support.

   Generally ARM displaced stepping works as follows:

   1. When an instruction is to be single-stepped, it is first decoded by
      arm_process_displaced_insn.  Depending on the type of instruction, it is
      then copied to a scratch location, possibly in a modified form.  The
      copy_* set of functions performs such modification, as necessary.  A
      breakpoint is placed after the modified instruction in the scratch space
      to return control to GDB.  Note in particular that instructions which
      modify the PC will no longer do so after modification.

   2. The instruction is single-stepped, by setting the PC to the scratch
      location address, and resuming.  Control returns to GDB when the
      breakpoint is hit.

   3. A cleanup function (cleanup_*) is called corresponding to the copy_*
      function used for the current instruction.  This function's job is to
      put the CPU/memory state back to what it would have been if the
      instruction had been executed unmodified in its original location.  */

/* NOP instruction (mov r0, r0).  */
#define ARM_NOP   0xe1a00000
#define THUMB_NOP 0x4600
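
/* As a concrete illustration (the instruction and addresses are only an
   example), suppose we single-step an ARM "ldr pc, [pc, #4]" at address
   FROM.  arm_process_displaced_insn classifies it as a load that writes the
   PC, so the copy placed in the scratch buffer is rewritten to use scratch
   registers (roughly "ldr r0, [r2, r3]"); the original r0/r2/r3 are saved in
   the closure, and r2/r3 are preloaded with FROM + 8 and the immediate
   offset.  After the step, the cleanup routine restores r0/r2/r3 and writes
   the loaded value to the PC with LOAD_WRITE_PC, giving the same final state
   as executing the unmodified instruction at FROM.  */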
/* Helper for register reads for displaced stepping.  In particular, this
   returns the PC as it would be seen by the instruction at its original
   location.  */

ULONGEST
displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
                    int regno)
{
  ULONGEST ret;
  CORE_ADDR from = dsc->insn_addr;

  if (regno == ARM_PC_REGNUM)
    {
      /* Compute pipeline offset:
         - When executing an ARM instruction, PC reads as the address of the
         current instruction plus 8.
         - When executing a Thumb instruction, PC reads as the address of the
         current instruction plus 4.  */
      if (!dsc->is_thumb)
        from += 8;
      else
        from += 4;

      if (debug_displaced)
        fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
                            (unsigned long) from);
      return (ULONGEST) from;
    }
  else
    {
      regcache_cooked_read_unsigned (regs, regno, &ret);
      if (debug_displaced)
        fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
                            regno, (unsigned long) ret);
      return ret;
    }
}
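
/* For example, if the original instruction is at 0x8000, reading the PC
   through this function yields 0x8008 for an ARM instruction and 0x8004 for
   a Thumb instruction, i.e. the value the instruction would have seen in its
   original location rather than in the scratch buffer.  */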
static int
displaced_in_arm_mode (struct regcache *regs)
{
  ULONGEST ps;
  ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());

  regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);

  return (ps & t_bit) == 0;
}
/* Write to the PC as from a branch instruction.  */

static void
branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
                 ULONGEST val)
{
  if (!dsc->is_thumb)
    /* Note: If bits 0/1 are set, this branch would be unpredictable for
       architecture versions < 6.  */
    regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
                                    val & ~(ULONGEST) 0x3);
  else
    regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
                                    val & ~(ULONGEST) 0x1);
}
/* Write to the PC as from a branch-exchange instruction.  */

static void
bx_write_pc (struct regcache *regs, ULONGEST val)
{
  ULONGEST ps;
  ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());

  regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);

  if ((val & 1) == 1)
    {
      regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
      regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
    }
  else if ((val & 2) == 0)
    {
      regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
      regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
    }
  else
    {
      /* Unpredictable behaviour.  Try to do something sensible (switch to ARM
         mode, align dest to 4 bytes).  */
      warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
      regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
      regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
                                      val & 0xfffffffc);
    }
}
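
/* In brief, the interworking rules implemented above: a destination with
   bit 0 set (e.g. 0x8001) selects Thumb state and branches to 0x8000; a
   word-aligned destination selects ARM state; and a destination with
   bits [1:0] == 0b10 is unpredictable, so we warn, force ARM state and
   round the address down to a word boundary.  */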
/* Write to the PC as if from a load instruction.  */

static void
load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
               ULONGEST val)
{
  if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
    bx_write_pc (regs, val);
  else
    branch_write_pc (regs, dsc, val);
}

/* Write to the PC as if from an ALU instruction.  */

static void
alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
              ULONGEST val)
{
  if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
    bx_write_pc (regs, val);
  else
    branch_write_pc (regs, dsc, val);
}
/* Helper for writing to registers for displaced stepping.  Writing to the PC
   has varying effects depending on the instruction which does the write:
   this is controlled by the WRITE_PC argument.  */

void
displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
                     int regno, ULONGEST val, enum pc_write_style write_pc)
{
  if (regno == ARM_PC_REGNUM)
    {
      if (debug_displaced)
        fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
                            (unsigned long) val);
      switch (write_pc)
        {
        case BRANCH_WRITE_PC:
          branch_write_pc (regs, dsc, val);
          break;

        case BX_WRITE_PC:
          bx_write_pc (regs, val);
          break;

        case LOAD_WRITE_PC:
          load_write_pc (regs, dsc, val);
          break;

        case ALU_WRITE_PC:
          alu_write_pc (regs, dsc, val);
          break;

        case CANNOT_WRITE_PC:
          warning (_("Instruction wrote to PC in an unexpected way when "
                     "single-stepping"));
          break;

        default:
          internal_error (__FILE__, __LINE__,
                          _("Invalid argument to displaced_write_reg"));
        }

      dsc->wrote_to_pc = 1;
    }
  else
    {
      if (debug_displaced)
        fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
                            regno, (unsigned long) val);
      regcache_cooked_write_unsigned (regs, regno, val);
    }
}
/* This function is used to concisely determine if an instruction INSN
   references PC.  Register fields of interest in INSN should have the
   corresponding fields of BITMASK set to 0b1111.  The function
   returns 1 if any of these fields in INSN reference the PC
   (also 0b1111, r15), else it returns 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t lowbit = 1;

  while (bitmask != 0)
    {
      uint32_t mask;

      for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
        ;

      if (!lowbit)
        return 0;

      mask = lowbit * 0xf;

      if ((insn & mask) == mask)
        return 1;

      bitmask &= ~mask;
    }

  return 0;
}
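
/* For example, arm_copy_alu_reg below calls
   insn_references_pc (insn, 0x000ff00ful), which checks the Rn (bits 16-19),
   Rd (bits 12-15) and Rm (bits 0-3) fields of the instruction; the call
   returns 1 only if one of those fields is 0b1111 (r15), in which case the
   instruction must be rewritten before being stepped out of line.  */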
/* The simplest copy function.  Many instructions have the same effect no
   matter what address they are executed at: in those cases, use this.  */

static int
arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
                     const char *iname, arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
                        "opcode/class '%s' unmodified\n", (unsigned long) insn,
                        iname);

  dsc->modinsn[0] = insn;

  return 0;
}

static int
thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
                             uint16_t insn2, const char *iname,
                             arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
                        "opcode/class '%s' unmodified\n", insn1, insn2,
                        iname);

  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = insn2;
  dsc->numinsns = 2;

  return 0;
}

/* Copy 16-bit Thumb (Thumb and 16-bit Thumb-2) instructions without any
   modification.  */

static int
thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
                             const char *iname,
                             arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
                        "opcode/class '%s' unmodified\n", insn,
                        iname);

  dsc->modinsn[0] = insn;

  return 0;
}
4657 /* Preload instructions with immediate offset. */
4660 cleanup_preload (struct gdbarch
*gdbarch
,
4661 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
4663 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
4664 if (!dsc
->u
.preload
.immed
)
4665 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
4669 install_preload (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4670 arm_displaced_step_closure
*dsc
, unsigned int rn
)
4673 /* Preload instructions:
4675 {pli/pld} [rn, #+/-imm]
4677 {pli/pld} [r0, #+/-imm]. */
4679 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4680 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4681 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4682 dsc
->u
.preload
.immed
= 1;
4684 dsc
->cleanup
= &cleanup_preload
;
4688 arm_copy_preload (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
4689 arm_displaced_step_closure
*dsc
)
4691 unsigned int rn
= bits (insn
, 16, 19);
4693 if (!insn_references_pc (insn
, 0x000f0000ul
))
4694 return arm_copy_unmodified (gdbarch
, insn
, "preload", dsc
);
4696 if (debug_displaced
)
4697 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
4698 (unsigned long) insn
);
4700 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
4702 install_preload (gdbarch
, regs
, dsc
, rn
);
4708 thumb2_copy_preload (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
4709 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
4711 unsigned int rn
= bits (insn1
, 0, 3);
4712 unsigned int u_bit
= bit (insn1
, 7);
4713 int imm12
= bits (insn2
, 0, 11);
4716 if (rn
!= ARM_PC_REGNUM
)
4717 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "preload", dsc
);
  /* The PC is only allowed to be used in PLI (immediate, literal)
     encoding T3, and PLD (literal) encoding T1.  */
4721 if (debug_displaced
)
4722 fprintf_unfiltered (gdb_stdlog
,
4723 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4724 (unsigned int) dsc
->insn_addr
, u_bit
? '+' : '-',
4730 /* Rewrite instruction {pli/pld} PC imm12 into:
4731 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4735 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4737 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4738 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
4740 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
4742 displaced_write_reg (regs
, dsc
, 0, pc_val
, CANNOT_WRITE_PC
);
4743 displaced_write_reg (regs
, dsc
, 1, imm12
, CANNOT_WRITE_PC
);
4744 dsc
->u
.preload
.immed
= 0;
4746 /* {pli/pld} [r0, r1] */
4747 dsc
->modinsn
[0] = insn1
& 0xfff0;
4748 dsc
->modinsn
[1] = 0xf001;
4751 dsc
->cleanup
= &cleanup_preload
;
4755 /* Preload instructions with register offset. */
4758 install_preload_reg(struct gdbarch
*gdbarch
, struct regcache
*regs
,
4759 arm_displaced_step_closure
*dsc
, unsigned int rn
,
4762 ULONGEST rn_val
, rm_val
;
4764 /* Preload register-offset instructions:
4766 {pli/pld} [rn, rm {, shift}]
4768 {pli/pld} [r0, r1 {, shift}]. */
4770 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4771 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
4772 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4773 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
4774 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4775 displaced_write_reg (regs
, dsc
, 1, rm_val
, CANNOT_WRITE_PC
);
4776 dsc
->u
.preload
.immed
= 0;
4778 dsc
->cleanup
= &cleanup_preload
;
4782 arm_copy_preload_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
4783 struct regcache
*regs
,
4784 arm_displaced_step_closure
*dsc
)
4786 unsigned int rn
= bits (insn
, 16, 19);
4787 unsigned int rm
= bits (insn
, 0, 3);
4790 if (!insn_references_pc (insn
, 0x000f000ful
))
4791 return arm_copy_unmodified (gdbarch
, insn
, "preload reg", dsc
);
4793 if (debug_displaced
)
4794 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
4795 (unsigned long) insn
);
4797 dsc
->modinsn
[0] = (insn
& 0xfff0fff0) | 0x1;
4799 install_preload_reg (gdbarch
, regs
, dsc
, rn
, rm
);
4803 /* Copy/cleanup coprocessor load and store instructions. */
4806 cleanup_copro_load_store (struct gdbarch
*gdbarch
,
4807 struct regcache
*regs
,
4808 arm_displaced_step_closure
*dsc
)
4810 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 0);
4812 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
4814 if (dsc
->u
.ldst
.writeback
)
4815 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, LOAD_WRITE_PC
);
4819 install_copro_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4820 arm_displaced_step_closure
*dsc
,
4821 int writeback
, unsigned int rn
)
4825 /* Coprocessor load/store instructions:
4827 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4829 {stc/stc2} [r0, #+/-imm].
4831 ldc/ldc2 are handled identically. */
4833 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4834 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4835 /* PC should be 4-byte aligned. */
4836 rn_val
= rn_val
& 0xfffffffc;
4837 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4839 dsc
->u
.ldst
.writeback
= writeback
;
4840 dsc
->u
.ldst
.rn
= rn
;
4842 dsc
->cleanup
= &cleanup_copro_load_store
;
4846 arm_copy_copro_load_store (struct gdbarch
*gdbarch
, uint32_t insn
,
4847 struct regcache
*regs
,
4848 arm_displaced_step_closure
*dsc
)
4850 unsigned int rn
= bits (insn
, 16, 19);
4852 if (!insn_references_pc (insn
, 0x000f0000ul
))
4853 return arm_copy_unmodified (gdbarch
, insn
, "copro load/store", dsc
);
4855 if (debug_displaced
)
4856 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
4857 "load/store insn %.8lx\n", (unsigned long) insn
);
4859 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
4861 install_copro_load_store (gdbarch
, regs
, dsc
, bit (insn
, 25), rn
);
4867 thumb2_copy_copro_load_store (struct gdbarch
*gdbarch
, uint16_t insn1
,
4868 uint16_t insn2
, struct regcache
*regs
,
4869 arm_displaced_step_closure
*dsc
)
4871 unsigned int rn
= bits (insn1
, 0, 3);
4873 if (rn
!= ARM_PC_REGNUM
)
4874 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
4875 "copro load/store", dsc
);
4877 if (debug_displaced
)
4878 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
4879 "load/store insn %.4x%.4x\n", insn1
, insn2
);
4881 dsc
->modinsn
[0] = insn1
& 0xfff0;
4882 dsc
->modinsn
[1] = insn2
;
4885 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4886 doesn't support writeback, so pass 0. */
4887 install_copro_load_store (gdbarch
, regs
, dsc
, 0, rn
);
4892 /* Clean up branch instructions (actually perform the branch, by setting
4896 cleanup_branch (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4897 arm_displaced_step_closure
*dsc
)
4899 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
4900 int branch_taken
= condition_true (dsc
->u
.branch
.cond
, status
);
4901 enum pc_write_style write_pc
= dsc
->u
.branch
.exchange
4902 ? BX_WRITE_PC
: BRANCH_WRITE_PC
;
4907 if (dsc
->u
.branch
.link
)
      /* The value of LR should be the address of the insn after the current
         one.  In order not to confuse the logic that later handles a `bx lr'
         insn, if the current insn mode is Thumb, bit 0 of the LR value is
         set to 1.  */
4912 ULONGEST next_insn_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
4915 next_insn_addr
|= 0x1;
4917 displaced_write_reg (regs
, dsc
, ARM_LR_REGNUM
, next_insn_addr
,
4921 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, dsc
->u
.branch
.dest
, write_pc
);
4924 /* Copy B/BL/BLX instructions with immediate destinations. */
4927 install_b_bl_blx (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4928 arm_displaced_step_closure
*dsc
,
4929 unsigned int cond
, int exchange
, int link
, long offset
)
  /* Implement "BL<cond> <label>" as:

     Preparation: cond <- instruction condition
     Insn: mov r0, r0 (nop)
     Cleanup: if (condition true) { r14 <- pc; pc <- label }.

     B<cond> similar, but don't set r14 in cleanup.  */
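
  /* As an illustrative example (addresses arbitrary): an ARM "bl" at 0x8000
     whose decoded offset is 0x100 gets branch.dest = 0x8000 + 8 + 0x100,
     i.e. the offset is applied to the pipelined PC value; the copied
     instruction is just a NOP, and the actual branch (plus the LR write for
     BL) is performed by cleanup_branch.  */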
4939 dsc
->u
.branch
.cond
= cond
;
4940 dsc
->u
.branch
.link
= link
;
4941 dsc
->u
.branch
.exchange
= exchange
;
4943 dsc
->u
.branch
.dest
= dsc
->insn_addr
;
4944 if (link
&& exchange
)
4945 /* For BLX, offset is computed from the Align (PC, 4). */
4946 dsc
->u
.branch
.dest
= dsc
->u
.branch
.dest
& 0xfffffffc;
4949 dsc
->u
.branch
.dest
+= 4 + offset
;
4951 dsc
->u
.branch
.dest
+= 8 + offset
;
4953 dsc
->cleanup
= &cleanup_branch
;
4956 arm_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint32_t insn
,
4957 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
4959 unsigned int cond
= bits (insn
, 28, 31);
4960 int exchange
= (cond
== 0xf);
4961 int link
= exchange
|| bit (insn
, 24);
4964 if (debug_displaced
)
4965 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s immediate insn "
4966 "%.8lx\n", (exchange
) ? "blx" : (link
) ? "bl" : "b",
4967 (unsigned long) insn
);
4969 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4970 then arrange the switch into Thumb mode. */
4971 offset
= (bits (insn
, 0, 23) << 2) | (bit (insn
, 24) << 1) | 1;
4973 offset
= bits (insn
, 0, 23) << 2;
4975 if (bit (offset
, 25))
4976 offset
= offset
| ~0x3ffffff;
4978 dsc
->modinsn
[0] = ARM_NOP
;
4980 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
4985 thumb2_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint16_t insn1
,
4986 uint16_t insn2
, struct regcache
*regs
,
4987 arm_displaced_step_closure
*dsc
)
4989 int link
= bit (insn2
, 14);
4990 int exchange
= link
&& !bit (insn2
, 12);
4993 int j1
= bit (insn2
, 13);
4994 int j2
= bit (insn2
, 11);
4995 int s
= sbits (insn1
, 10, 10);
4996 int i1
= !(j1
^ bit (insn1
, 10));
4997 int i2
= !(j2
^ bit (insn1
, 10));
4999 if (!link
&& !exchange
) /* B */
5001 offset
= (bits (insn2
, 0, 10) << 1);
5002 if (bit (insn2
, 12)) /* Encoding T4 */
5004 offset
|= (bits (insn1
, 0, 9) << 12)
5010 else /* Encoding T3 */
5012 offset
|= (bits (insn1
, 0, 5) << 12)
5016 cond
= bits (insn1
, 6, 9);
5021 offset
= (bits (insn1
, 0, 9) << 12);
5022 offset
|= ((i2
<< 22) | (i1
<< 23) | (s
<< 24));
5023 offset
|= exchange
?
5024 (bits (insn2
, 1, 10) << 2) : (bits (insn2
, 0, 10) << 1);
5027 if (debug_displaced
)
5028 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s insn "
5029 "%.4x %.4x with offset %.8lx\n",
5030 link
? (exchange
) ? "blx" : "bl" : "b",
5031 insn1
, insn2
, offset
);
5033 dsc
->modinsn
[0] = THUMB_NOP
;
5035 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
5039 /* Copy B Thumb instructions. */
5041 thumb_copy_b (struct gdbarch
*gdbarch
, uint16_t insn
,
5042 arm_displaced_step_closure
*dsc
)
5044 unsigned int cond
= 0;
5046 unsigned short bit_12_15
= bits (insn
, 12, 15);
5047 CORE_ADDR from
= dsc
->insn_addr
;
5049 if (bit_12_15
== 0xd)
5051 /* offset = SignExtend (imm8:0, 32) */
5052 offset
= sbits ((insn
<< 1), 0, 8);
5053 cond
= bits (insn
, 8, 11);
5055 else if (bit_12_15
== 0xe) /* Encoding T2 */
5057 offset
= sbits ((insn
<< 1), 0, 11);
5061 if (debug_displaced
)
5062 fprintf_unfiltered (gdb_stdlog
,
5063 "displaced: copying b immediate insn %.4x "
5064 "with offset %d\n", insn
, offset
);
5066 dsc
->u
.branch
.cond
= cond
;
5067 dsc
->u
.branch
.link
= 0;
5068 dsc
->u
.branch
.exchange
= 0;
5069 dsc
->u
.branch
.dest
= from
+ 4 + offset
;
5071 dsc
->modinsn
[0] = THUMB_NOP
;
5073 dsc
->cleanup
= &cleanup_branch
;
5078 /* Copy BX/BLX with register-specified destinations. */
5081 install_bx_blx_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5082 arm_displaced_step_closure
*dsc
, int link
,
5083 unsigned int cond
, unsigned int rm
)
5085 /* Implement {BX,BLX}<cond> <reg>" as:
5087 Preparation: cond <- instruction condition
5088 Insn: mov r0, r0 (nop)
5089 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5091 Don't set r14 in cleanup for BX. */
5093 dsc
->u
.branch
.dest
= displaced_read_reg (regs
, dsc
, rm
);
5095 dsc
->u
.branch
.cond
= cond
;
5096 dsc
->u
.branch
.link
= link
;
5098 dsc
->u
.branch
.exchange
= 1;
5100 dsc
->cleanup
= &cleanup_branch
;
5104 arm_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5105 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
5107 unsigned int cond
= bits (insn
, 28, 31);
5110 int link
= bit (insn
, 5);
5111 unsigned int rm
= bits (insn
, 0, 3);
5113 if (debug_displaced
)
5114 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx",
5115 (unsigned long) insn
);
5117 dsc
->modinsn
[0] = ARM_NOP
;
5119 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, cond
, rm
);
5124 thumb_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
5125 struct regcache
*regs
,
5126 arm_displaced_step_closure
*dsc
)
5128 int link
= bit (insn
, 7);
5129 unsigned int rm
= bits (insn
, 3, 6);
5131 if (debug_displaced
)
5132 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x",
5133 (unsigned short) insn
);
5135 dsc
->modinsn
[0] = THUMB_NOP
;
5137 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, INST_AL
, rm
);
5143 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5146 cleanup_alu_imm (struct gdbarch
*gdbarch
,
5147 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
5149 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
5150 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5151 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5152 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5156 arm_copy_alu_imm (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5157 arm_displaced_step_closure
*dsc
)
5159 unsigned int rn
= bits (insn
, 16, 19);
5160 unsigned int rd
= bits (insn
, 12, 15);
5161 unsigned int op
= bits (insn
, 21, 24);
5162 int is_mov
= (op
== 0xd);
5163 ULONGEST rd_val
, rn_val
;
5165 if (!insn_references_pc (insn
, 0x000ff000ul
))
5166 return arm_copy_unmodified (gdbarch
, insn
, "ALU immediate", dsc
);
5168 if (debug_displaced
)
5169 fprintf_unfiltered (gdb_stdlog
, "displaced: copying immediate %s insn "
5170 "%.8lx\n", is_mov
? "move" : "ALU",
5171 (unsigned long) insn
);
5173 /* Instruction is of form:
5175 <op><cond> rd, [rn,] #imm
5179 Preparation: tmp1, tmp2 <- r0, r1;
5181 Insn: <op><cond> r0, r1, #imm
5182 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5185 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5186 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5187 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5188 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5189 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5190 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5194 dsc
->modinsn
[0] = insn
& 0xfff00fff;
5196 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x10000;
5198 dsc
->cleanup
= &cleanup_alu_imm
;
5204 thumb2_copy_alu_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
5205 uint16_t insn2
, struct regcache
*regs
,
5206 arm_displaced_step_closure
*dsc
)
5208 unsigned int op
= bits (insn1
, 5, 8);
5209 unsigned int rn
, rm
, rd
;
5210 ULONGEST rd_val
, rn_val
;
5212 rn
= bits (insn1
, 0, 3); /* Rn */
5213 rm
= bits (insn2
, 0, 3); /* Rm */
5214 rd
= bits (insn2
, 8, 11); /* Rd */
5216 /* This routine is only called for instruction MOV. */
5217 gdb_assert (op
== 0x2 && rn
== 0xf);
5219 if (rm
!= ARM_PC_REGNUM
&& rd
!= ARM_PC_REGNUM
)
5220 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ALU imm", dsc
);
5222 if (debug_displaced
)
5223 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.4x%.4x\n",
5224 "ALU", insn1
, insn2
);
5226 /* Instruction is of form:
5228 <op><cond> rd, [rn,] #imm
5232 Preparation: tmp1, tmp2 <- r0, r1;
5234 Insn: <op><cond> r0, r1, #imm
5235 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5238 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5239 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5240 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5241 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5242 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5243 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5246 dsc
->modinsn
[0] = insn1
;
5247 dsc
->modinsn
[1] = ((insn2
& 0xf0f0) | 0x1);
5250 dsc
->cleanup
= &cleanup_alu_imm
;
5255 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5258 cleanup_alu_reg (struct gdbarch
*gdbarch
,
5259 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
5264 rd_val
= displaced_read_reg (regs
, dsc
, 0);
5266 for (i
= 0; i
< 3; i
++)
5267 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
5269 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5273 install_alu_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5274 arm_displaced_step_closure
*dsc
,
5275 unsigned int rd
, unsigned int rn
, unsigned int rm
)
5277 ULONGEST rd_val
, rn_val
, rm_val
;
5279 /* Instruction is of form:
5281 <op><cond> rd, [rn,] rm [, <shift>]
5285 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5286 r0, r1, r2 <- rd, rn, rm
5287 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5288 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5291 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5292 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5293 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5294 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5295 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5296 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5297 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5298 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5299 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
5302 dsc
->cleanup
= &cleanup_alu_reg
;
5306 arm_copy_alu_reg (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5307 arm_displaced_step_closure
*dsc
)
5309 unsigned int op
= bits (insn
, 21, 24);
5310 int is_mov
= (op
== 0xd);
5312 if (!insn_references_pc (insn
, 0x000ff00ful
))
5313 return arm_copy_unmodified (gdbarch
, insn
, "ALU reg", dsc
);
5315 if (debug_displaced
)
5316 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.8lx\n",
5317 is_mov
? "move" : "ALU", (unsigned long) insn
);
5320 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x2;
5322 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x10002;
5324 install_alu_reg (gdbarch
, regs
, dsc
, bits (insn
, 12, 15), bits (insn
, 16, 19),
5330 thumb_copy_alu_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
5331 struct regcache
*regs
,
5332 arm_displaced_step_closure
*dsc
)
5336 rm
= bits (insn
, 3, 6);
5337 rd
= (bit (insn
, 7) << 3) | bits (insn
, 0, 2);
5339 if (rd
!= ARM_PC_REGNUM
&& rm
!= ARM_PC_REGNUM
)
5340 return thumb_copy_unmodified_16bit (gdbarch
, insn
, "ALU reg", dsc
);
5342 if (debug_displaced
)
5343 fprintf_unfiltered (gdb_stdlog
, "displaced: copying ALU reg insn %.4x\n",
5344 (unsigned short) insn
);
5346 dsc
->modinsn
[0] = ((insn
& 0xff00) | 0x10);
5348 install_alu_reg (gdbarch
, regs
, dsc
, rd
, rd
, rm
);
5353 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5356 cleanup_alu_shifted_reg (struct gdbarch
*gdbarch
,
5357 struct regcache
*regs
,
5358 arm_displaced_step_closure
*dsc
)
5360 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
5363 for (i
= 0; i
< 4; i
++)
5364 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
5366 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5370 install_alu_shifted_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5371 arm_displaced_step_closure
*dsc
,
5372 unsigned int rd
, unsigned int rn
, unsigned int rm
,
5376 ULONGEST rd_val
, rn_val
, rm_val
, rs_val
;
5378 /* Instruction is of form:
5380 <op><cond> rd, [rn,] rm, <shift> rs
5384 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5385 r0, r1, r2, r3 <- rd, rn, rm, rs
5386 Insn: <op><cond> r0, r1, r2, <shift> r3
5388 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5392 for (i
= 0; i
< 4; i
++)
5393 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
5395 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5396 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5397 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5398 rs_val
= displaced_read_reg (regs
, dsc
, rs
);
5399 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5400 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5401 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
5402 displaced_write_reg (regs
, dsc
, 3, rs_val
, CANNOT_WRITE_PC
);
5404 dsc
->cleanup
= &cleanup_alu_shifted_reg
;
5408 arm_copy_alu_shifted_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5409 struct regcache
*regs
,
5410 arm_displaced_step_closure
*dsc
)
5412 unsigned int op
= bits (insn
, 21, 24);
5413 int is_mov
= (op
== 0xd);
5414 unsigned int rd
, rn
, rm
, rs
;
5416 if (!insn_references_pc (insn
, 0x000fff0ful
))
5417 return arm_copy_unmodified (gdbarch
, insn
, "ALU shifted reg", dsc
);
5419 if (debug_displaced
)
5420 fprintf_unfiltered (gdb_stdlog
, "displaced: copying shifted reg %s insn "
5421 "%.8lx\n", is_mov
? "move" : "ALU",
5422 (unsigned long) insn
);
5424 rn
= bits (insn
, 16, 19);
5425 rm
= bits (insn
, 0, 3);
5426 rs
= bits (insn
, 8, 11);
5427 rd
= bits (insn
, 12, 15);
5430 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x302;
5432 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x10302;
5434 install_alu_shifted_reg (gdbarch
, regs
, dsc
, rd
, rn
, rm
, rs
);
5439 /* Clean up load instructions. */
5442 cleanup_load (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5443 arm_displaced_step_closure
*dsc
)
5445 ULONGEST rt_val
, rt_val2
= 0, rn_val
;
5447 rt_val
= displaced_read_reg (regs
, dsc
, 0);
5448 if (dsc
->u
.ldst
.xfersize
== 8)
5449 rt_val2
= displaced_read_reg (regs
, dsc
, 1);
5450 rn_val
= displaced_read_reg (regs
, dsc
, 2);
5452 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5453 if (dsc
->u
.ldst
.xfersize
> 4)
5454 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5455 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
5456 if (!dsc
->u
.ldst
.immed
)
5457 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
5459 /* Handle register writeback. */
5460 if (dsc
->u
.ldst
.writeback
)
5461 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
5462 /* Put result in right place. */
5463 displaced_write_reg (regs
, dsc
, dsc
->rd
, rt_val
, LOAD_WRITE_PC
);
5464 if (dsc
->u
.ldst
.xfersize
== 8)
5465 displaced_write_reg (regs
, dsc
, dsc
->rd
+ 1, rt_val2
, LOAD_WRITE_PC
);
5468 /* Clean up store instructions. */
5471 cleanup_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5472 arm_displaced_step_closure
*dsc
)
5474 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 2);
5476 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5477 if (dsc
->u
.ldst
.xfersize
> 4)
5478 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5479 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
5480 if (!dsc
->u
.ldst
.immed
)
5481 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
5482 if (!dsc
->u
.ldst
.restore_r4
)
5483 displaced_write_reg (regs
, dsc
, 4, dsc
->tmp
[4], CANNOT_WRITE_PC
);
5486 if (dsc
->u
.ldst
.writeback
)
5487 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
5490 /* Copy "extra" load/store instructions. These are halfword/doubleword
5491 transfers, which have a different encoding to byte/word transfers. */
5494 arm_copy_extra_ld_st (struct gdbarch
*gdbarch
, uint32_t insn
, int unprivileged
,
5495 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
5497 unsigned int op1
= bits (insn
, 20, 24);
5498 unsigned int op2
= bits (insn
, 5, 6);
5499 unsigned int rt
= bits (insn
, 12, 15);
5500 unsigned int rn
= bits (insn
, 16, 19);
5501 unsigned int rm
= bits (insn
, 0, 3);
5502 char load
[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5503 char bytesize
[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5504 int immed
= (op1
& 0x4) != 0;
5506 ULONGEST rt_val
, rt_val2
= 0, rn_val
, rm_val
= 0;
5508 if (!insn_references_pc (insn
, 0x000ff00ful
))
5509 return arm_copy_unmodified (gdbarch
, insn
, "extra load/store", dsc
);
5511 if (debug_displaced
)
5512 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %sextra load/store "
5513 "insn %.8lx\n", unprivileged
? "unprivileged " : "",
5514 (unsigned long) insn
);
5516 opcode
= ((op2
<< 2) | (op1
& 0x1) | ((op1
& 0x4) >> 1)) - 4;
5519 internal_error (__FILE__
, __LINE__
,
5520 _("copy_extra_ld_st: instruction decode error"));
5522 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5523 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5524 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5526 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5528 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
5529 if (bytesize
[opcode
] == 8)
5530 rt_val2
= displaced_read_reg (regs
, dsc
, rt
+ 1);
5531 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5533 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5535 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
5536 if (bytesize
[opcode
] == 8)
5537 displaced_write_reg (regs
, dsc
, 1, rt_val2
, CANNOT_WRITE_PC
);
5538 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
5540 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
5543 dsc
->u
.ldst
.xfersize
= bytesize
[opcode
];
5544 dsc
->u
.ldst
.rn
= rn
;
5545 dsc
->u
.ldst
.immed
= immed
;
5546 dsc
->u
.ldst
.writeback
= bit (insn
, 24) == 0 || bit (insn
, 21) != 0;
5547 dsc
->u
.ldst
.restore_r4
= 0;
5550 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5552 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5553 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
5555 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5557 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5558 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
5560 dsc
->cleanup
= load
[opcode
] ? &cleanup_load
: &cleanup_store
;
5565 /* Copy byte/half word/word loads and stores. */
5568 install_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5569 arm_displaced_step_closure
*dsc
, int load
,
5570 int immed
, int writeback
, int size
, int usermode
,
5571 int rt
, int rm
, int rn
)
5573 ULONGEST rt_val
, rn_val
, rm_val
= 0;
5575 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5576 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5578 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5580 dsc
->tmp
[4] = displaced_read_reg (regs
, dsc
, 4);
5582 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
5583 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5585 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5587 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
5588 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
5590 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
5592 dsc
->u
.ldst
.xfersize
= size
;
5593 dsc
->u
.ldst
.rn
= rn
;
5594 dsc
->u
.ldst
.immed
= immed
;
5595 dsc
->u
.ldst
.writeback
= writeback
;
  /* To write PC we can do:

     Before this sequence of instructions:
     r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
     r2 is the Rn value got from displaced_read_reg.

     Insn1: push {pc}            Write address of STR instruction + offset on stack
     Insn2: pop  {r4}            Read it back from stack, r4 = addr(Insn1) + offset
     Insn3: sub r4, r4, pc       r4 = addr(Insn1) + offset - pc
                                    = addr(Insn1) + offset - addr(Insn3) - 8
                                    = offset - 16
     Insn4: add r4, r4, #8       r4 = offset - 8
     Insn5: add r0, r0, r4       r0 = from + 8 + offset - 8
                                    = from + offset
     Insn6: str r0, [r2, #imm]   (or str r0, [r2, r3])

     Otherwise we don't know what value to write for PC, since the offset is
     architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
     of this can be found in Section "Saving from r15" in
     http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
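
  /* For example (illustrative only): on a core that stores PC+8 for STR,
     offset == 8, so r4 == 0 after Insn4 and Insn5 leaves r0 == from + 8,
     exactly what the unmodified instruction would have stored; on a core
     that stores PC+12, offset == 12, r4 == 4 and r0 == from + 12.  */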
5618 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
5623 thumb2_copy_load_literal (struct gdbarch
*gdbarch
, uint16_t insn1
,
5624 uint16_t insn2
, struct regcache
*regs
,
5625 arm_displaced_step_closure
*dsc
, int size
)
5627 unsigned int u_bit
= bit (insn1
, 7);
5628 unsigned int rt
= bits (insn2
, 12, 15);
5629 int imm12
= bits (insn2
, 0, 11);
5632 if (debug_displaced
)
5633 fprintf_unfiltered (gdb_stdlog
,
5634 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5635 (unsigned int) dsc
->insn_addr
, rt
, u_bit
? '+' : '-',
5641 /* Rewrite instruction LDR Rt imm12 into:
5643 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5647 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5650 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5651 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5652 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5654 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
5656 pc_val
= pc_val
& 0xfffffffc;
5658 displaced_write_reg (regs
, dsc
, 2, pc_val
, CANNOT_WRITE_PC
);
5659 displaced_write_reg (regs
, dsc
, 3, imm12
, CANNOT_WRITE_PC
);
5663 dsc
->u
.ldst
.xfersize
= size
;
5664 dsc
->u
.ldst
.immed
= 0;
5665 dsc
->u
.ldst
.writeback
= 0;
5666 dsc
->u
.ldst
.restore_r4
= 0;
  /* LDR R0, [R2, R3] */
5669 dsc
->modinsn
[0] = 0xf852;
5670 dsc
->modinsn
[1] = 0x3;
5673 dsc
->cleanup
= &cleanup_load
;
5679 thumb2_copy_load_reg_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
5680 uint16_t insn2
, struct regcache
*regs
,
5681 arm_displaced_step_closure
*dsc
,
5682 int writeback
, int immed
)
5684 unsigned int rt
= bits (insn2
, 12, 15);
5685 unsigned int rn
= bits (insn1
, 0, 3);
5686 unsigned int rm
= bits (insn2
, 0, 3); /* Only valid if !immed. */
5687 /* In LDR (register), there is also a register Rm, which is not allowed to
5688 be PC, so we don't have to check it. */
5690 if (rt
!= ARM_PC_REGNUM
&& rn
!= ARM_PC_REGNUM
)
5691 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "load",
5694 if (debug_displaced
)
5695 fprintf_unfiltered (gdb_stdlog
,
5696 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5697 rt
, rn
, insn1
, insn2
);
5699 install_load_store (gdbarch
, regs
, dsc
, 1, immed
, writeback
, 4,
5702 dsc
->u
.ldst
.restore_r4
= 0;
5705 /* ldr[b]<cond> rt, [rn, #imm], etc.
5707 ldr[b]<cond> r0, [r2, #imm]. */
5709 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
5710 dsc
->modinsn
[1] = insn2
& 0x0fff;
5713 /* ldr[b]<cond> rt, [rn, rm], etc.
5715 ldr[b]<cond> r0, [r2, r3]. */
5717 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
5718 dsc
->modinsn
[1] = (insn2
& 0x0ff0) | 0x3;
5728 arm_copy_ldr_str_ldrb_strb (struct gdbarch
*gdbarch
, uint32_t insn
,
5729 struct regcache
*regs
,
5730 arm_displaced_step_closure
*dsc
,
5731 int load
, int size
, int usermode
)
5733 int immed
= !bit (insn
, 25);
5734 int writeback
= (bit (insn
, 24) == 0 || bit (insn
, 21) != 0);
5735 unsigned int rt
= bits (insn
, 12, 15);
5736 unsigned int rn
= bits (insn
, 16, 19);
5737 unsigned int rm
= bits (insn
, 0, 3); /* Only valid if !immed. */
5739 if (!insn_references_pc (insn
, 0x000ff00ful
))
5740 return arm_copy_unmodified (gdbarch
, insn
, "load/store", dsc
);
5742 if (debug_displaced
)
5743 fprintf_unfiltered (gdb_stdlog
,
5744 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5745 load
? (size
== 1 ? "ldrb" : "ldr")
5746 : (size
== 1 ? "strb" : "str"), usermode
? "t" : "",
5748 (unsigned long) insn
);
5750 install_load_store (gdbarch
, regs
, dsc
, load
, immed
, writeback
, size
,
5751 usermode
, rt
, rm
, rn
);
5753 if (load
|| rt
!= ARM_PC_REGNUM
)
5755 dsc
->u
.ldst
.restore_r4
= 0;
5758 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5760 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5761 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
5763 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5765 {ldr,str}[b]<cond> r0, [r2, r3]. */
5766 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
5770 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5771 dsc
->u
.ldst
.restore_r4
= 1;
5772 dsc
->modinsn
[0] = 0xe92d8000; /* push {pc} */
5773 dsc
->modinsn
[1] = 0xe8bd0010; /* pop {r4} */
5774 dsc
->modinsn
[2] = 0xe044400f; /* sub r4, r4, pc. */
5775 dsc
->modinsn
[3] = 0xe2844008; /* add r4, r4, #8. */
5776 dsc
->modinsn
[4] = 0xe0800004; /* add r0, r0, r4. */
5780 dsc
->modinsn
[5] = (insn
& 0xfff00fff) | 0x20000;
5782 dsc
->modinsn
[5] = (insn
& 0xfff00ff0) | 0x20003;
5787 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
/* Cleanup LDM instructions with fully-populated register list.  This is an
   unfortunate corner case: it's impossible to implement correctly by modifying
   the instruction.  The issue is as follows: we have an instruction,

   ldm rN, {r0-r15}

   which we must rewrite to avoid loading PC.  A possible solution would be to
   do the load in two halves, something like (with suitable cleanup
   afterwards):

   mov r8, rN
   ldm[id][ab] r8!, {r0-r7}
   str r7, <temp>
   ldm[id][ab] r8, {r7-r14}
   <bkpt>

   but at present there's no suitable place for <temp>, since the scratch space
   is overwritten before the cleanup routine is called.  For now, we simply
   emulate the instruction.  */
5813 cleanup_block_load_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5814 arm_displaced_step_closure
*dsc
)
5816 int inc
= dsc
->u
.block
.increment
;
5817 int bump_before
= dsc
->u
.block
.before
? (inc
? 4 : -4) : 0;
5818 int bump_after
= dsc
->u
.block
.before
? 0 : (inc
? 4 : -4);
5819 uint32_t regmask
= dsc
->u
.block
.regmask
;
5820 int regno
= inc
? 0 : 15;
5821 CORE_ADDR xfer_addr
= dsc
->u
.block
.xfer_addr
;
5822 int exception_return
= dsc
->u
.block
.load
&& dsc
->u
.block
.user
5823 && (regmask
& 0x8000) != 0;
5824 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5825 int do_transfer
= condition_true (dsc
->u
.block
.cond
, status
);
5826 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
5831 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5832 sensible we can do here. Complain loudly. */
5833 if (exception_return
)
5834 error (_("Cannot single-step exception return"));
5836 /* We don't handle any stores here for now. */
5837 gdb_assert (dsc
->u
.block
.load
!= 0);
5839 if (debug_displaced
)
5840 fprintf_unfiltered (gdb_stdlog
, "displaced: emulating block transfer: "
5841 "%s %s %s\n", dsc
->u
.block
.load
? "ldm" : "stm",
5842 dsc
->u
.block
.increment
? "inc" : "dec",
5843 dsc
->u
.block
.before
? "before" : "after");
5850 while (regno
<= ARM_PC_REGNUM
&& (regmask
& (1 << regno
)) == 0)
5853 while (regno
>= 0 && (regmask
& (1 << regno
)) == 0)
5856 xfer_addr
+= bump_before
;
5858 memword
= read_memory_unsigned_integer (xfer_addr
, 4, byte_order
);
5859 displaced_write_reg (regs
, dsc
, regno
, memword
, LOAD_WRITE_PC
);
5861 xfer_addr
+= bump_after
;
5863 regmask
&= ~(1 << regno
);
5866 if (dsc
->u
.block
.writeback
)
5867 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, xfer_addr
,
5871 /* Clean up an STM which included the PC in the register list. */
5874 cleanup_block_store_pc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5875 arm_displaced_step_closure
*dsc
)
5877 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5878 int store_executed
= condition_true (dsc
->u
.block
.cond
, status
);
5879 CORE_ADDR pc_stored_at
, transferred_regs
= bitcount (dsc
->u
.block
.regmask
);
5880 CORE_ADDR stm_insn_addr
;
5883 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
5885 /* If condition code fails, there's nothing else to do. */
5886 if (!store_executed
)
5889 if (dsc
->u
.block
.increment
)
5891 pc_stored_at
= dsc
->u
.block
.xfer_addr
+ 4 * transferred_regs
;
5893 if (dsc
->u
.block
.before
)
5898 pc_stored_at
= dsc
->u
.block
.xfer_addr
;
5900 if (dsc
->u
.block
.before
)
5904 pc_val
= read_memory_unsigned_integer (pc_stored_at
, 4, byte_order
);
5905 stm_insn_addr
= dsc
->scratch_base
;
5906 offset
= pc_val
- stm_insn_addr
;
5908 if (debug_displaced
)
5909 fprintf_unfiltered (gdb_stdlog
, "displaced: detected PC offset %.8lx for "
5910 "STM instruction\n", offset
);
5912 /* Rewrite the stored PC to the proper value for the non-displaced original
5914 write_memory_unsigned_integer (pc_stored_at
, 4, byte_order
,
5915 dsc
->insn_addr
+ offset
);
/* Clean up an LDM which includes the PC in the register list.  We clumped all
   the registers in the transferred list into a contiguous range r0...rX (to
   avoid loading PC directly and losing control of the debugged program), so we
   must undo that here.  */
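
/* For example (illustrative only), "ldm r3, {r1, r5, pc}" is executed out of
   line as "ldm r3, {r0, r1, r2}": the words destined for r1, r5 and the PC
   land in r0, r1 and r2.  This cleanup then moves r2 into the PC (via
   LOAD_WRITE_PC), r1 into r5 and r0 into r1, and finally restores from the
   saved temporaries any of r0-r2 that were only used as staging registers.  */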
5924 cleanup_block_load_pc (struct gdbarch
*gdbarch
,
5925 struct regcache
*regs
,
5926 arm_displaced_step_closure
*dsc
)
5928 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5929 int load_executed
= condition_true (dsc
->u
.block
.cond
, status
);
5930 unsigned int mask
= dsc
->u
.block
.regmask
, write_reg
= ARM_PC_REGNUM
;
5931 unsigned int regs_loaded
= bitcount (mask
);
5932 unsigned int num_to_shuffle
= regs_loaded
, clobbered
;
5934 /* The method employed here will fail if the register list is fully populated
5935 (we need to avoid loading PC directly). */
5936 gdb_assert (num_to_shuffle
< 16);
5941 clobbered
= (1 << num_to_shuffle
) - 1;
5943 while (num_to_shuffle
> 0)
5945 if ((mask
& (1 << write_reg
)) != 0)
5947 unsigned int read_reg
= num_to_shuffle
- 1;
5949 if (read_reg
!= write_reg
)
5951 ULONGEST rval
= displaced_read_reg (regs
, dsc
, read_reg
);
5952 displaced_write_reg (regs
, dsc
, write_reg
, rval
, LOAD_WRITE_PC
);
5953 if (debug_displaced
)
5954 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: move "
5955 "loaded register r%d to r%d\n"), read_reg
,
5958 else if (debug_displaced
)
5959 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: register "
5960 "r%d already in the right place\n"),
5963 clobbered
&= ~(1 << write_reg
);
5971 /* Restore any registers we scribbled over. */
5972 for (write_reg
= 0; clobbered
!= 0; write_reg
++)
5974 if ((clobbered
& (1 << write_reg
)) != 0)
5976 displaced_write_reg (regs
, dsc
, write_reg
, dsc
->tmp
[write_reg
],
5978 if (debug_displaced
)
5979 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: restored "
5980 "clobbered register r%d\n"), write_reg
);
5981 clobbered
&= ~(1 << write_reg
);
5985 /* Perform register writeback manually. */
5986 if (dsc
->u
.block
.writeback
)
5988 ULONGEST new_rn_val
= dsc
->u
.block
.xfer_addr
;
5990 if (dsc
->u
.block
.increment
)
5991 new_rn_val
+= regs_loaded
* 4;
5993 new_rn_val
-= regs_loaded
* 4;
5995 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, new_rn_val
,
6000 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6001 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6004 arm_copy_block_xfer (struct gdbarch
*gdbarch
, uint32_t insn
,
6005 struct regcache
*regs
,
6006 arm_displaced_step_closure
*dsc
)
6008 int load
= bit (insn
, 20);
6009 int user
= bit (insn
, 22);
6010 int increment
= bit (insn
, 23);
6011 int before
= bit (insn
, 24);
6012 int writeback
= bit (insn
, 21);
6013 int rn
= bits (insn
, 16, 19);
6015 /* Block transfers which don't mention PC can be run directly
6017 if (rn
!= ARM_PC_REGNUM
&& (insn
& 0x8000) == 0)
6018 return arm_copy_unmodified (gdbarch
, insn
, "ldm/stm", dsc
);
6020 if (rn
== ARM_PC_REGNUM
)
6022 warning (_("displaced: Unpredictable LDM or STM with "
6023 "base register r15"));
6024 return arm_copy_unmodified (gdbarch
, insn
, "unpredictable ldm/stm", dsc
);
6027 if (debug_displaced
)
6028 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
6029 "%.8lx\n", (unsigned long) insn
);
6031 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
6032 dsc
->u
.block
.rn
= rn
;
6034 dsc
->u
.block
.load
= load
;
6035 dsc
->u
.block
.user
= user
;
6036 dsc
->u
.block
.increment
= increment
;
6037 dsc
->u
.block
.before
= before
;
6038 dsc
->u
.block
.writeback
= writeback
;
6039 dsc
->u
.block
.cond
= bits (insn
, 28, 31);
6041 dsc
->u
.block
.regmask
= insn
& 0xffff;
6045 if ((insn
& 0xffff) == 0xffff)
6047 /* LDM with a fully-populated register list. This case is
6048 particularly tricky. Implement for now by fully emulating the
6049 instruction (which might not behave perfectly in all cases, but
6050 these instructions should be rare enough for that not to matter
6052 dsc
->modinsn
[0] = ARM_NOP
;
6054 dsc
->cleanup
= &cleanup_block_load_all
;
6058 /* LDM of a list of registers which includes PC. Implement by
6059 rewriting the list of registers to be transferred into a
6060 contiguous chunk r0...rX before doing the transfer, then shuffling
6061 registers into the correct places in the cleanup routine. */
6062 unsigned int regmask
= insn
& 0xffff;
6063 unsigned int num_in_list
= bitcount (regmask
), new_regmask
;
6066 for (i
= 0; i
< num_in_list
; i
++)
6067 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
6069 /* Writeback makes things complicated. We need to avoid clobbering
6070 the base register with one of the registers in our modified
6071 register list, but just using a different register can't work in
6074 ldm r14!, {r0-r13,pc}
6076 which would need to be rewritten as:
6080 but that can't work, because there's no free register for N.
6082 Solve this by turning off the writeback bit, and emulating
6083 writeback manually in the cleanup routine. */
6088 new_regmask
= (1 << num_in_list
) - 1;
6090 if (debug_displaced
)
6091 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
6092 "{..., pc}: original reg list %.4x, modified "
6093 "list %.4x\n"), rn
, writeback
? "!" : "",
6094 (int) insn
& 0xffff, new_regmask
);
6096 dsc
->modinsn
[0] = (insn
& ~0xffff) | (new_regmask
& 0xffff);
6098 dsc
->cleanup
= &cleanup_block_load_pc
;
6103 /* STM of a list of registers which includes PC. Run the instruction
6104 as-is, but out of line: this will store the wrong value for the PC,
6105 so we must manually fix up the memory in the cleanup routine.
6106 Doing things this way has the advantage that we can auto-detect
6107 the offset of the PC write (which is architecture-dependent) in
6108 the cleanup routine. */
6109 dsc
->modinsn
[0] = insn
;
6111 dsc
->cleanup
= &cleanup_block_store_pc
;
6118 thumb2_copy_block_xfer (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
6119 struct regcache
*regs
,
6120 arm_displaced_step_closure
*dsc
)
6122 int rn
= bits (insn1
, 0, 3);
6123 int load
= bit (insn1
, 4);
6124 int writeback
= bit (insn1
, 5);
6126 /* Block transfers which don't mention PC can be run directly
6128 if (rn
!= ARM_PC_REGNUM
&& (insn2
& 0x8000) == 0)
6129 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ldm/stm", dsc
);
6131 if (rn
== ARM_PC_REGNUM
)
6133 warning (_("displaced: Unpredictable LDM or STM with "
6134 "base register r15"));
6135 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6136 "unpredictable ldm/stm", dsc
);
6139 if (debug_displaced
)
6140 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
6141 "%.4x%.4x\n", insn1
, insn2
);
  /* Clear bit 13, since it should always be zero.  */
6144 dsc
->u
.block
.regmask
= (insn2
& 0xdfff);
6145 dsc
->u
.block
.rn
= rn
;
6147 dsc
->u
.block
.load
= load
;
6148 dsc
->u
.block
.user
= 0;
6149 dsc
->u
.block
.increment
= bit (insn1
, 7);
6150 dsc
->u
.block
.before
= bit (insn1
, 8);
6151 dsc
->u
.block
.writeback
= writeback
;
6152 dsc
->u
.block
.cond
= INST_AL
;
6153 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
6157 if (dsc
->u
.block
.regmask
== 0xffff)
      /* This cannot happen: bit 13 was cleared above, so the register
         mask can never be 0xffff.  */
6164 unsigned int regmask
= dsc
->u
.block
.regmask
;
6165 unsigned int num_in_list
= bitcount (regmask
), new_regmask
;
6168 for (i
= 0; i
< num_in_list
; i
++)
6169 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
6174 new_regmask
= (1 << num_in_list
) - 1;
6176 if (debug_displaced
)
6177 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
6178 "{..., pc}: original reg list %.4x, modified "
6179 "list %.4x\n"), rn
, writeback
? "!" : "",
6180 (int) dsc
->u
.block
.regmask
, new_regmask
);
6182 dsc
->modinsn
[0] = insn1
;
6183 dsc
->modinsn
[1] = (new_regmask
& 0xffff);
6186 dsc
->cleanup
= &cleanup_block_load_pc
;
6191 dsc
->modinsn
[0] = insn1
;
6192 dsc
->modinsn
[1] = insn2
;
6194 dsc
->cleanup
= &cleanup_block_store_pc
;
6199 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6200 This is used to avoid a dependency on BFD's bfd_endian enum. */
6203 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr
, int len
,
6206 return read_memory_unsigned_integer (memaddr
, len
,
6207 (enum bfd_endian
) byte_order
);
6210 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6213 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs
*self
,
6216 return gdbarch_addr_bits_remove (self
->regcache
->arch (), val
);
6219 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6222 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs
*self
)
6227 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6230 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs
*self
)
6232 return arm_is_thumb (self
->regcache
);
6235 /* single_step() is called just before we want to resume the inferior,
6236 if we want to single-step it but there is no hardware or kernel
6237 single-step support. We find the target of the coming instructions
6238 and breakpoint them. */
6240 std::vector
<CORE_ADDR
>
6241 arm_software_single_step (struct regcache
*regcache
)
6243 struct gdbarch
*gdbarch
= regcache
->arch ();
6244 struct arm_get_next_pcs next_pcs_ctx
;
6246 arm_get_next_pcs_ctor (&next_pcs_ctx
,
6247 &arm_get_next_pcs_ops
,
6248 gdbarch_byte_order (gdbarch
),
6249 gdbarch_byte_order_for_code (gdbarch
),
6253 std::vector
<CORE_ADDR
> next_pcs
= arm_get_next_pcs (&next_pcs_ctx
);
6255 for (CORE_ADDR
&pc_ref
: next_pcs
)
6256 pc_ref
= gdbarch_addr_bits_remove (gdbarch
, pc_ref
);
6261 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6262 for Linux, where some SVC instructions must be treated specially. */
6265 cleanup_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6266 arm_displaced_step_closure
*dsc
)
6268 CORE_ADDR resume_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
6270 if (debug_displaced
)
6271 fprintf_unfiltered (gdb_stdlog
, "displaced: cleanup for svc, resume at "
6272 "%.8lx\n", (unsigned long) resume_addr
);
6274 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, resume_addr
, BRANCH_WRITE_PC
);
/* Common copy routine for svc instruction.  */
6281 install_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6282 arm_displaced_step_closure
*dsc
)
6284 /* Preparation: none.
6285 Insn: unmodified svc.
6286 Cleanup: pc <- insn_addr + insn_size. */
6288 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6290 dsc
->wrote_to_pc
= 1;
6292 /* Allow OS-specific code to override SVC handling. */
6293 if (dsc
->u
.svc
.copy_svc_os
)
6294 return dsc
->u
.svc
.copy_svc_os (gdbarch
, regs
, dsc
);
6297 dsc
->cleanup
= &cleanup_svc
;
static int
arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
	      struct regcache *regs, arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
			(unsigned long) insn);

  dsc->modinsn[0] = insn;

  return install_svc (gdbarch, regs, dsc);
}

static int
thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
		struct regcache *regs, arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
			insn);

  dsc->modinsn[0] = insn;

  return install_svc (gdbarch, regs, dsc);
}
/* Copy undefined instructions.  */

static int
arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
		arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying undefined insn %.8lx\n",
			(unsigned long) insn);

  dsc->modinsn[0] = insn;

  return 0;
}

static int
thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1,
			uint16_t insn2, arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
			"%.4x %.4x\n", (unsigned short) insn1,
			(unsigned short) insn2);

  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = insn2;
  dsc->numinsns = 2;

  return 0;
}

/* Copy unpredictable instructions.  */

static int
arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
		 arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
			"%.8lx\n", (unsigned long) insn);

  dsc->modinsn[0] = insn;

  return 0;
}
/* The decode_* functions are instruction decoding helpers.  They mostly follow
   the presentation in the ARM ARM.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode miscellaneous instructions in dp/misc encoding space.  */

static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    arm_displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     arm_displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  arm_displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
static int
arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
			struct regcache *regs,
			arm_displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
  else
    return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
}

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode shifted register instructions.  */

static int
thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
			    uint16_t insn2, struct regcache *regs,
			    arm_displaced_step_closure *dsc)
{
  /* PC is only allowed to be used in instruction MOV.  */

  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn = bits (insn1, 0, 3);

  if (op == 0x2 && rn == 0xf) /* MOV */
    return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					"dp (shift reg)", dsc);
}


/* Decode extension register load/store.  Exactly the same as
   arm_decode_ext_reg_ld_st.  */

static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     arm_displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c: /* 01x00 */
    case 0x0a: case 0x0e: /* 01x10 */
    case 0x12: case 0x16: /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d: /* 01x01 */
    case 0x0b: case 0x0f: /* 01x11 */
    case 0x13: case 0x17: /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
static int
arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
		      struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 25);
  int op = bit (insn, 4);
  unsigned int coproc = bits (insn, 8, 11);

  if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
    return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* stc/stc2.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* ldc/ldc2 imm/lit.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x3e) == 0x00)
    return arm_copy_undef (gdbarch, insn, dsc);
  else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
    return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
  else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
  else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
  else if ((op1 & 0x30) == 0x20 && !op)
    {
      if ((coproc & 0xe) == 0xa)
	return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
    }
  else if ((op1 & 0x30) == 0x20 && op)
    return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
  else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
  else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
  else if ((op1 & 0x30) == 0x30)
    return arm_copy_svc (gdbarch, insn, regs, dsc);
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Possibly unreachable.  */
}
static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 arm_displaced_step_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0) /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	  /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0) /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else /* LDC/LDC2 {literal, immediate}.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
}
static void
install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
		     arm_displaced_step_closure *dsc, int rd)
{
  /* ADR Rd, #imm

     Rewrite as:

     Preparation: Rd <- PC
     Insn: ADD Rd, #imm
     Cleanup: Null.  */

  /* Rd <- PC */
  int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
  displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
}

static int
thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
			      arm_displaced_step_closure *dsc,
			      int rd, unsigned int imm)
{
  /* Encoding T2: ADDS Rd, #imm */
  dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
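/* Worked example (illustrative): for "adr r2, <label>" where the assembler
   resolved the offset to 4, the scratch instruction built above is
   0x3000 | (2 << 8) | 4 == 0x3204, i.e. "adds r2, #4", executed after r2
   has been preloaded with the original PC value.  */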
static int
thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
				struct regcache *regs,
				arm_displaced_step_closure *dsc)
{
  unsigned int rd = bits (insn, 8, 10);
  unsigned int imm8 = bits (insn, 0, 7);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb adr r%d, #%d insn %.4x\n",
			rd, imm8, insn);

  return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
}
static int
thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int rd = bits (insn2, 8, 11);
  /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
     extract the raw immediate encoding rather than computing the immediate
     value.  When generating the ADD or SUB instruction we can then OR the
     immediate straight into the encoding.  */
  unsigned int imm_3_8 = insn2 & 0x70ff;
  unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
			rd, imm_i, imm_3_8, insn1, insn2);

  if (bit (insn1, 7)) /* Encoding T2 */
    {
      /* Encoding T3: SUB Rd, Rd, #imm */
      dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  else /* Encoding T3 */
    {
      /* Encoding T3: ADD Rd, Rd, #imm */
      dsc->modinsn[0] = (0xf100 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  dsc->numinsns = 2;

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
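/* Illustrative note: the 12-bit ADR immediate is split across the two
   halfwords as i:imm3:imm8.  "imm_i" keeps bit 10 of the first halfword
   (the "i" bit) in place, and "imm_3_8" keeps imm3 (bits 12-14) and imm8
   (bits 0-7) of the second halfword in place, which is why both values
   can simply be OR-ed into the ADD/SUB (immediate) encodings built
   above.  */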
static int
thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
			      struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int rt = bits (insn1, 8, 10);
  unsigned int pc;
  int imm8 = (bits (insn1, 0, 7) << 2);

  /* LDR Rd, #imm8

     Rewrite as:

     Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;

     Insn: LDR R0, [R2, R3];
     Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb ldr r%d [pc #%d]\n",
			rt, imm8);

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
  /* The assembler calculates the required value of the offset from the
     Align(PC,4) value of this instruction to the label.  */
  pc = pc & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);

  dsc->rd = rt;
  dsc->u.ldst.xfersize = 4;
  dsc->u.ldst.rn = 0;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  dsc->modinsn[0] = 0x58d0;	/* ldr r0, [r2, r3] */

  dsc->cleanup = &cleanup_load;

  return 0;
}
/* Copy Thumb cbnz/cbz instruction.  */

static int
thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
		     struct regcache *regs,
		     arm_displaced_step_closure *dsc)
{
  int non_zero = bit (insn1, 11);
  unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
  CORE_ADDR from = dsc->insn_addr;
  int rn = bits (insn1, 0, 2);
  int rn_val = displaced_read_reg (regs, dsc, rn);

  dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
  /* CBNZ and CBZ do not affect the condition flags.  If the condition is
     true, set it to INST_AL so cleanup_branch knows the branch is taken;
     otherwise the condition is false, leave it alone and cleanup_branch
     will do nothing.  */
  if (dsc->u.branch.cond)
    {
      dsc->u.branch.cond = INST_AL;
      dsc->u.branch.dest = from + 4 + imm5;
    }
  else
    dsc->u.branch.dest = from + 2;

  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
			" insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
			rn, rn_val, insn1, dsc->u.branch.dest);

  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;
  return 0;
}
/* Copy Table Branch Byte/Halfword */

static int
thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  ULONGEST rn_val, rm_val;
  int is_tbh = bit (insn2, 4);
  CORE_ADDR halfwords = 0;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
  rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));

  if (is_tbh)
    {
      gdb_byte buf[2];

      target_read_memory (rn_val + 2 * rm_val, buf, 2);
      halfwords = extract_unsigned_integer (buf, 2, byte_order);
    }
  else
    {
      gdb_byte buf[1];

      target_read_memory (rn_val + rm_val, buf, 1);
      halfwords = extract_unsigned_integer (buf, 1, byte_order);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
			" offset 0x%x\n", is_tbh ? "tbh" : "tbb",
			(unsigned int) rn_val, (unsigned int) rm_val,
			(unsigned int) halfwords);

  dsc->u.branch.cond = INST_AL;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
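/* Illustrative example: for "tbb [r0, r1]" where r0 points at a byte table
   and the selected entry holds 3, the code above computes
   dest = insn_addr + 4 + 2 * 3, i.e. a forward branch of 6 bytes relative
   to the PC value of the TBB instruction, exactly as the hardware would
   branch.  */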
static void
cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  /* PC <- r7 */
  int val = displaced_read_reg (regs, dsc, 7);
  displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);

  /* r7 <- r8 */
  val = displaced_read_reg (regs, dsc, 8);
  displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);

  /* r8 <- tmp[0] */
  displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
}
static int
thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
			 struct regcache *regs,
			 arm_displaced_step_closure *dsc)
{
  dsc->u.block.regmask = insn1 & 0x00ff;

  /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}

     to:

     (1) register list is full, that is, r0-r7 are used.
     Prepare: tmp[0] <- r8

     POP {r0, r1, ...., r6, r7}; remove PC from reglist
     MOV r8, r7; Move value of r7 to r8;
     POP {r7}; Store PC value into r7.

     Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]

     (2) register list is not full, supposing there are N registers in
     register list (except PC, 0 <= N <= 7).
     Prepare: for each i, 0 - N, tmp[i] <- ri.

     POP {r0, r1, ...., rN};

     Cleanup: Set registers in original reglist from r0 - rN.  Restore r0 - rN
     from tmp[] properly.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
			dsc->u.block.regmask, insn1);

  if (dsc->u.block.regmask == 0xff)
    {
      dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);

      dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
      dsc->modinsn[1] = 0x46b8;		  /* MOV r8, r7 */
      dsc->modinsn[2] = 0xbc80;		  /* POP {r7} */

      dsc->numinsns = 3;
      dsc->cleanup = &cleanup_pop_pc_16bit_all;
    }
  else
    {
      unsigned int num_in_list = bitcount (dsc->u.block.regmask);
      unsigned int i;
      unsigned int new_regmask;

      for (i = 0; i < num_in_list + 1; i++)
	dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

      new_regmask = (1 << (num_in_list + 1)) - 1;

      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
					  "{..., pc}: original reg list %.4x,"
					  " modified list %.4x\n"),
			    (int) dsc->u.block.regmask, new_regmask);

      dsc->u.block.regmask |= 0x8000;
      dsc->u.block.writeback = 0;
      dsc->u.block.cond = INST_AL;

      dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);

      dsc->cleanup = &cleanup_block_load_pc;
    }

  return 0;
}
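/* Worked example for case (2) above (illustrative): "pop {r0, r2, pc}" has
   regmask 0x05, so num_in_list is 2 and new_regmask becomes
   (1 << 3) - 1 == 0x07.  The scratch instruction therefore pops r0-r2, and
   cleanup_block_load_pc afterwards scatters those three loaded values back
   into r0, r2 and the PC, as named by the original register list.  */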
static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    struct regcache *regs,
				    arm_displaced_step_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions.  */
  switch (op_bit_12_15)
    {
      /* Shift (imme), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					 "shift/add/sub/mov/cmp",
					 dsc);
      break;
    case 4:
      switch (op_bit_10_11)
	{
	case 0: /* Data-processing */
	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					     "data-processing",
					     dsc);
	  break;
	case 1: /* Special data instructions and branch and exchange.  */
	  {
	    unsigned short op = bits (insn1, 7, 9);
	    if (op == 6 || op == 7) /* BX or BLX */
	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
						 dsc);
	  }
	  break;
	default: /* LDR (literal) */
	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
	}
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address */
	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
	switch (bits (insn1, 8, 11))
	  {
	  case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
	    break;
	  case 12: case 13: /* POP */
	    if (bit (insn1, 8)) /* PC is in register list.  */
	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
	    break;
	  case 15: /* If-Then, and hints */
	    if (bits (insn1, 0, 3))
	      /* If-Then makes up to four following instructions conditional.
		 The IT instruction itself is not conditional, so handle it as
		 a common unmodified instruction.  */
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
						 dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
	    break;
	  default:
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
	  }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
	err = thumb_copy_b (gdbarch, insn1, dsc);
      else
	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
				 uint16_t insn1, uint16_t insn2,
				 struct regcache *regs,
				 arm_displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);
  int rn = bits (insn1, 0, 3);
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
	{
	  if (rn == 0xf)
	    /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"pli/pld", dsc);
	}
      else
	{
	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     1);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrb{reg, immediate}/ldrbt",
						dsc);
	}
      break;

    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "pld/unalloc memhint", dsc);
      else
	{
	  if (rn == 0xf)
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     2);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrh/ldrht", dsc);
	}
      break;

    case 2: /* Load word */
      {
	int insn2_bit_8_11 = bits (insn2, 8, 11);

	if (rn == 0xf)
	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
	else if (op1 == 0x1) /* Encoding T3 */
	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
					   0, 1);
	else /* op1 == 0x0 */
	  {
	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
	      /* LDR (immediate) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, bit (insn2, 8), 1);
	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						  "ldrt", dsc);
	    else
	      /* LDR (register) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, 0, 0);
	  }
	break;
      }
    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      break;
    }
  return 0;
}
static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    uint16_t insn2, struct regcache *regs,
				    arm_displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      {
	switch (bits (insn1, 9, 10))
	  {
	  case 0:
	    if (bit (insn1, 6))
	      {
		/* Load/store {dual, exclusive}, table branch.  */
		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
		    && bits (insn2, 5, 7) == 0)
		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
						  dsc);
		else
		  /* PC is not allowed to be used in load/store {dual,
		     exclusive} instructions.  */
		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						     "load/store dual/ex", dsc);
	      }
	    else /* load/store multiple */
	      {
		switch (bits (insn1, 7, 8))
		  {
		  case 0: case 3: /* SRS, RFE */
		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						       "srs/rfe", dsc);
		    break;
		  case 1: case 2: /* LDM/STM/PUSH/POP */
		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
		    break;
		  }
	      }
	    break;

	  case 1:
	    /* Data-processing (shift register).  */
	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
					      dsc);
	    break;
	  default: /* Coprocessor instructions.  */
	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	    break;
	  }
	break;
      }
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
	{
	  if (bit (insn2, 14)  /* BLX/BL */
	      || bit (insn2, 12) /* Unconditional branch */
	      || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
	  else
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "misc ctrl", dsc);
	}
      else
	{
	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
	    {
	      int dp_op = bits (insn1, 4, 8);
	      int rn = bits (insn1, 0, 3);
	      if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
						    regs, dsc);
	      else
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "dp/pb", dsc);
	    }
	  else /* Data processing (modified immediate) */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "dp/mi", dsc);
	}
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
	{
	case 0:
	  if (bit (insn1, 4))
	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
						   regs, dsc);
	  else /* NEON Load/Store and Store single data item */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "neon elt/struct load/store",
					       dsc);
	  break;
	case 1: /* op1 = 3, bits (9, 10) == 1 */
	  switch (bits (insn1, 7, 8))
	    {
	    case 0: case 1: /* Data processing (register) */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "dp(reg)", dsc);
	      break;
	    case 2: /* Multiply and absolute difference */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "mul/mua/diff", dsc);
	      break;
	    case 3: /* Long multiply and divide */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "lmul/lmua", dsc);
	      break;
	    }
	  break;
	default: /* Coprocessor instructions */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	  break;
	}
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));
}
static void
thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
			      struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  uint16_t insn1
    = read_memory_unsigned_integer (from, 2, byte_order_for_code);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
			"at %.8lx\n", insn1, (unsigned long) from);

  dsc->is_thumb = 1;
  dsc->insn_size = thumb_insn_size (insn1);
  if (thumb_insn_size (insn1) == 4)
    {
      uint16_t insn2
	= read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
      thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
    }
  else
    thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
}
void
arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, struct regcache *regs,
			    arm_displaced_step_closure *dsc)
{
  int err = 0;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  uint32_t insn;

  /* Most displaced instructions use a 1-instruction scratch space, so set this
     here and override below if/when necessary.  */
  dsc->numinsns = 1;
  dsc->insn_addr = from;
  dsc->scratch_base = to;
  dsc->cleanup = NULL;
  dsc->wrote_to_pc = 0;

  if (!displaced_in_arm_mode (regs))
    return thumb_process_displaced_insn (gdbarch, from, regs, dsc);

  dsc->is_thumb = 0;
  dsc->insn_size = 4;
  insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
			"at %.8lx\n", (unsigned long) insn,
			(unsigned long) from);

  if ((insn & 0xf0000000) == 0xf0000000)
    err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
  else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
    {
    case 0x0: case 0x1: case 0x2: case 0x3:
      err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
      break;

    case 0x4: case 0x5: case 0x6:
      err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
      break;

    case 0x7:
      err = arm_decode_media (gdbarch, insn, dsc);
      break;

    case 0x8: case 0x9: case 0xa: case 0xb:
      err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
      break;

    case 0xc: case 0xd: case 0xe: case 0xf:
      err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
      break;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("arm_process_displaced_insn: Instruction decode error"));
}
/* Actually set up the scratch space for a displaced instruction.  */

void
arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, arm_displaced_step_closure *dsc)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  unsigned int i, len, offset;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int size = dsc->is_thumb ? 2 : 4;
  const gdb_byte *bkp_insn;

  offset = 0;
  /* Poke modified instruction(s).  */
  for (i = 0; i < dsc->numinsns; i++)
    {
      if (debug_displaced)
	{
	  fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
	  if (size == 4)
	    fprintf_unfiltered (gdb_stdlog, "%.8lx",
				dsc->modinsn[i]);
	  else if (size == 2)
	    fprintf_unfiltered (gdb_stdlog, "%.4x",
				(unsigned short) dsc->modinsn[i]);

	  fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
			      (unsigned long) to + offset);
	}
      write_memory_unsigned_integer (to + offset, size,
				     byte_order_for_code,
				     dsc->modinsn[i]);
      offset += size;
    }

  /* Choose the correct breakpoint instruction.  */
  if (dsc->is_thumb)
    {
      bkp_insn = tdep->thumb_breakpoint;
      len = tdep->thumb_breakpoint_size;
    }
  else
    {
      bkp_insn = tdep->arm_breakpoint;
      len = tdep->arm_breakpoint_size;
    }

  /* Put breakpoint afterwards.  */
  write_memory (to + offset, bkp_insn, len);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
			paddress (gdbarch, from), paddress (gdbarch, to));
}
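/* Illustrative scratch-pad layout produced by the routine above, assuming a
   single 4-byte ARM replacement instruction:

     to + 0:  modinsn[0]            (the possibly rewritten instruction)
     to + 4:  tdep->arm_breakpoint  (so the inferior traps straight back)

   For Thumb the instruction slots are 2 bytes wide and the Thumb breakpoint
   sequence is appended instead.  */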
/* Entry point for cleaning things up after a displaced instruction has been
   single-stepped.  */

void
arm_displaced_step_fixup (struct gdbarch *gdbarch,
			  struct displaced_step_closure *dsc_,
			  CORE_ADDR from, CORE_ADDR to,
			  struct regcache *regs)
{
  arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;

  if (dsc->cleanup)
    dsc->cleanup (gdbarch, regs, dsc);

  if (!dsc->wrote_to_pc)
    regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
				    dsc->insn_addr + dsc->insn_size);
}
#include "bfd-in2.h"
#include "libcoff.h"

static int
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
{
  gdb_disassembler *di
    = static_cast<gdb_disassembler *>(info->application_data);
  struct gdbarch *gdbarch = di->arch ();

  if (arm_pc_is_thumb (gdbarch, memaddr))
    {
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;

      if (csym.native == NULL)
	{
	  /* Create a fake symbol vector containing a Thumb symbol.
	     This is solely so that the code in print_insn_little_arm()
	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
	     the presence of a Thumb symbol and switch to decoding
	     Thumb instructions.  */

	  fake_target.flavour = bfd_target_coff_flavour;
	  fake_bfd.xvec = &fake_target;
	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
	  csym.native = &ce;
	  csym.symbol.the_bfd = &fake_bfd;
	  csym.symbol.name = "fake";
	  asym = (asymbol *) & csym;
	}

      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    }
  else
    info->symbols = NULL;

  /* GDB is able to get bfd_mach from exec_bfd, so info->mach is accurate.
     Set the USER_SPECIFIED_MACHINE_TYPE bit so that
     opcodes/arm-dis.c:print_insn does not reset info->mach, which would
     otherwise trigger the assertion on the mismatch between info->mach and
     bfd_get_mach (exec_bfd) in default_print_insn.  */
  if (exec_bfd != NULL)
    info->flags |= USER_SPECIFIED_MACHINE_TYPE;

  return default_print_insn (memaddr, info);
}
/* The following define instruction sequences that will cause ARM
   cpu's to take an undefined instruction trap.  These are used to
   signal a breakpoint to GDB.

   The newer ARMv4T cpu's are capable of operating in ARM or Thumb
   modes.  A different instruction is required for each mode.  The ARM
   cpu's can also be big or little endian.  Thus four different
   instructions are needed to support all cases.

   Note: ARMv4 defines several new instructions that will take the
   undefined instruction trap.  ARM7TDMI is nominally ARMv4T, but does
   not in fact add the new instructions.  The new undefined
   instructions in ARMv4 are all instructions that had no defined
   behaviour in earlier chips.  There is no guarantee that they will
   raise an exception, but may be treated as NOP's.  In practice, it
   may only be safe to rely on instructions matching:

   3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
   1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
   C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x

   Even this may only be true if the condition predicate is true.  The
   following use a condition predicate of ALWAYS so it is always TRUE.

   There are other ways of forcing a breakpoint.  GNU/Linux, RISC iX,
   and NetBSD all use a software interrupt rather than an undefined
   instruction to force a trap.  This can be handled by the
   abi-specific code during establishment of the gdbarch vector.  */

#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
#define THUMB_BE_BREAKPOINT {0xbe,0xbe}

static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
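/* For reference (not from the original sources): read as words in their
   respective byte orders, the sequences above correspond to 0xE7FFDEFE for
   ARM and 0xBEBE for Thumb.  The ARM word matches the always-condition
   undefined pattern documented above (cond = 0xE, bits 27-25 = 011,
   bit 4 = 1), while the Thumb halfword is the BKPT encoding 0xBExx with
   immediate 0xBE.  */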
/* Implement the breakpoint_kind_from_pc gdbarch method.  */

static int
arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);

  if (arm_pc_is_thumb (gdbarch, *pcptr))
    {
      *pcptr = UNMAKE_THUMB_ADDR (*pcptr);

      /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
	 check whether we are replacing a 32-bit instruction.  */
      if (tdep->thumb2_breakpoint != NULL)
	{
	  gdb_byte buf[2];

	  if (target_read_memory (*pcptr, buf, 2) == 0)
	    {
	      unsigned short inst1;

	      inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
	      if (thumb_insn_size (inst1) == 4)
		return ARM_BP_KIND_THUMB2;
	    }
	}

      return ARM_BP_KIND_THUMB;
    }
  else
    return ARM_BP_KIND_ARM;
}
/* Implement the sw_breakpoint_from_kind gdbarch method.  */

static const gdb_byte *
arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  switch (kind)
    {
    case ARM_BP_KIND_ARM:
      *size = tdep->arm_breakpoint_size;
      return tdep->arm_breakpoint;
    case ARM_BP_KIND_THUMB:
      *size = tdep->thumb_breakpoint_size;
      return tdep->thumb_breakpoint;
    case ARM_BP_KIND_THUMB2:
      *size = tdep->thumb2_breakpoint_size;
      return tdep->thumb2_breakpoint;
    default:
      gdb_assert_not_reached ("unexpected arm breakpoint kind");
    }
}
/* Implement the breakpoint_kind_from_current_state gdbarch method.  */

static int
arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
					struct regcache *regcache,
					CORE_ADDR *pcptr)
{
  gdb_byte buf[4];

  /* Check the memory pointed by PC is readable.  */
  if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
    {
      struct arm_get_next_pcs next_pcs_ctx;

      arm_get_next_pcs_ctor (&next_pcs_ctx,
			     &arm_get_next_pcs_ops,
			     gdbarch_byte_order (gdbarch),
			     gdbarch_byte_order_for_code (gdbarch),
			     0,
			     regcache);

      std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);

      /* If MEMADDR is the next instruction of current pc, do the
	 software single step computation, and get the thumb mode by
	 the destination address.  */
      for (CORE_ADDR pc : next_pcs)
	{
	  if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
	    {
	      if (IS_THUMB_ADDR (pc))
		{
		  *pcptr = MAKE_THUMB_ADDR (*pcptr);
		  return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
		}
	      else
		return ARM_BP_KIND_ARM;
	    }
	}
    }

  return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
}
/* Extract from an array REGBUF containing the (raw) register state a
   function return value of type TYPE, and copy that, in virtual
   format, into VALBUF.  */

static void
arm_extract_return_value (struct type *type, struct regcache *regs,
			  gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE_FLT == TYPE_CODE (type))
    {
      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  {
	    /* The value is in register F0 in internal format.  We need to
	       extract the raw value and then convert it to the desired
	       internal type.  */
	    bfd_byte tmpbuf[FP_REGISTER_SIZE];

	    regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
	    target_float_convert (tmpbuf, arm_ext_type (gdbarch),
				  valbuf, type);
	  }
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  regs->cooked_read (ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regs->cooked_read (ARM_A1_REGNUM + 1, valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_extract_return_value: "
			    "Floating point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      /* If the type is a plain integer, then the access is
	 straight-forward.  Otherwise we have to play around a bit
	 more.  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      ULONGEST tmp;

      while (len > 0)
	{
	  /* By using store_unsigned_integer we avoid having to do
	     anything special for small big-endian values.  */
	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
	  store_unsigned_integer (valbuf,
				  (len > INT_REGISTER_SIZE
				   ? INT_REGISTER_SIZE : len),
				  byte_order, tmp);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  regs->cooked_read (regno++, tmpbuf);
	  memcpy (valbuf, tmpbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
/* Will a function return an aggregate type in memory or in a
   register?  Return 0 if an aggregate type can be returned in a
   register, 1 if it must be returned in memory.  */

static int
arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
{
  enum type_code code;

  type = check_typedef (type);

  /* Simple, non-aggregate types (ie not including vectors and
     complex) are always returned in a register (or registers).  */
  code = TYPE_CODE (type);
  if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
      && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
    return 0;

  if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
    {
      /* Vector values should be returned using ARM registers if they
	 are not over 16 bytes.  */
      return (TYPE_LENGTH (type) > 16);
    }

  if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
    {
      /* The AAPCS says all aggregates not larger than a word are returned
	 in registers.  */
      if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
	return 0;
      else
	return 1;
    }
  else
    {
      int nRc;

      /* All aggregate types that won't fit in a register must be returned
	 in memory.  */
      if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
	return 1;

      /* In the ARM ABI, "integer" like aggregate types are returned in
	 registers.  For an aggregate type to be integer like, its size
	 must be less than or equal to INT_REGISTER_SIZE and the
	 offset of each addressable subfield must be zero.  Note that bit
	 fields are not addressable, and all addressable subfields of
	 unions always start at offset zero.

	 This function is based on the behaviour of GCC 2.95.1.
	 See: gcc/arm.c: arm_return_in_memory() for details.

	 Note: All versions of GCC before GCC 2.95.2 do not set up the
	 parameters correctly for a function returning the following
	 structure: struct { float f;}; This should be returned in memory,
	 not a register.  Richard Earnshaw sent me a patch, but I do not
	 know of any way to detect if a function like the above has been
	 compiled with the correct calling convention.  */

      /* Assume all other aggregate types can be returned in a register.
	 Run a check for structures, unions and arrays.  */
      nRc = 0;

      if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
	{
	  int i;

	  /* Need to check if this struct/union is "integer" like.  For
	     this to be true, its size must be less than or equal to
	     INT_REGISTER_SIZE and the offset of each addressable
	     subfield must be zero.  Note that bit fields are not
	     addressable, and unions always start at offset zero.  If any
	     of the subfields is a floating point type, the struct/union
	     cannot be an integer type.  */

	  /* For each field in the object, check:
	     1) Is it FP? --> yes, nRc = 1;
	     2) Is it addressable (bitpos != 0) and
	     not packed (bitsize == 0)?
	     --> yes, nRc = 1  */

	  for (i = 0; i < TYPE_NFIELDS (type); i++)
	    {
	      enum type_code field_type_code;

	      field_type_code
		= TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type, i)));

	      /* Is it a floating point type field?  */
	      if (field_type_code == TYPE_CODE_FLT)
		{
		  nRc = 1;
		  break;
		}

	      /* If bitpos != 0, then we have to care about it.  */
	      if (TYPE_FIELD_BITPOS (type, i) != 0)
		{
		  /* Bitfields are not addressable.  If the field bitsize is
		     zero, then the field is not packed.  Hence it cannot be
		     a bitfield or any other packed type.  */
		  if (TYPE_FIELD_BITSIZE (type, i) == 0)
		    {
		      nRc = 1;
		      break;
		    }
		}
	    }
	}

      return nRc;
    }
}
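/* Illustrative examples for the APCS rules above (not from the original
   sources): "struct { char c; }" is integer-like and comes back in r0,
   while "struct { float f; }" contains a floating point field and
   "struct { char a; char b; }" has an addressable subfield at a non-zero
   offset, so both of those are flagged for return in memory by the checks
   above.  */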
/* Write into appropriate registers a function return value of type
   TYPE, given in virtual format.  */

static void
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE (type) == TYPE_CODE_FLT)
    {
      gdb_byte buf[FP_REGISTER_SIZE];

      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
	  regs->cooked_write (ARM_F0_REGNUM, buf);
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  regs->cooked_write (ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regs->cooked_write (ARM_A1_REGNUM + 1, valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
	{
	  /* Values of one word or less are zero/sign-extended and
	     returned in r0.  */
	  bfd_byte tmpbuf[INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);

	  store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
	  regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
	}
      else
	{
	  /* Integral values greater than one word are stored in consecutive
	     registers starting with r0.  This will always be a multiple of
	     the register size.  */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;

	  while (len > 0)
	    {
	      regs->cooked_write (regno++, valbuf);
	      len -= INT_REGISTER_SIZE;
	      valbuf += INT_REGISTER_SIZE;
	    }
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  memcpy (tmpbuf, valbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  regs->cooked_write (regno++, tmpbuf);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
/* Handle function return values.  */

static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
		  struct type *valtype, struct regcache *regcache,
		  gdb_byte *readbuf, const gdb_byte *writebuf)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  int vfp_base_count;

  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
    {
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      int i;

      for (i = 0; i < vfp_base_count; i++)
	{
	  if (reg_char == 'q')
	    {
	      if (writebuf)
		arm_neon_quad_write (gdbarch, regcache, i,
				     writebuf + i * unit_length);

	      if (readbuf)
		arm_neon_quad_read (gdbarch, regcache, i,
				    readbuf + i * unit_length);
	    }
	  else
	    {
	      char name_buf[4];
	      int regnum;

	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						    strlen (name_buf));
	      if (writebuf)
		regcache->cooked_write (regnum, writebuf + i * unit_length);
	      if (readbuf)
		regcache->cooked_read (regnum, readbuf + i * unit_length);
	    }
	}
      return RETURN_VALUE_REGISTER_CONVENTION;
    }

  if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
      || TYPE_CODE (valtype) == TYPE_CODE_UNION
      || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
    {
      if (tdep->struct_return == pcc_struct_return
	  || arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }
  else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
    {
      if (arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }

  if (writebuf)
    arm_store_return_value (valtype, regcache, writebuf);

  if (readbuf)
    arm_extract_return_value (valtype, regcache, readbuf);

  return RETURN_VALUE_REGISTER_CONVENTION;
}
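/* Illustrative note on the VFP path above (an assumption about the AAPCS
   VFP rules, not taken from the original sources): for a homogeneous
   aggregate of two doubles, vfp_base_count would be 2 and reg_char 'd', so
   the loop builds the register names "d0" and "d1" and transfers the value
   through those cooked registers; 'q' base types use the NEON quad helpers
   instead.  */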
static int
arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  CORE_ADDR jb_addr;
  gdb_byte buf[INT_REGISTER_SIZE];

  jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);

  if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
			  INT_REGISTER_SIZE))
    return 0;

  *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
  return 1;
}
/* Recognize GCC and GNU ld's trampolines.  If we are in a trampoline,
   return the target PC.  Otherwise return 0.  */

static CORE_ADDR
arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
{
  const char *name;
  int namelen;
  CORE_ADDR start_addr;

  /* Find the starting address and name of the function containing the PC.  */
  if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
    {
      /* Trampoline 'bx reg' doesn't belong to any functions.  Do the
         check here.  */
      start_addr = arm_skip_bx_reg (frame, pc);
      if (start_addr != 0)
        return start_addr;

      return 0;
    }

  /* If PC is in a Thumb call or return stub, return the address of the
     target PC, which is in a register.  The thunk functions are called
     _call_via_xx, where x is the register name.  The possible names
     are r0-r9, sl, fp, ip, sp, and lr.  ARM RealView has similar
     functions, named __ARM_call_via_r[0-7].  */
  if (startswith (name, "_call_via_")
      || startswith (name, "__ARM_call_via_"))
    {
      /* Use the name suffix to determine which register contains the
         target PC.  */
      static const char *table[15] =
      {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
       "r8", "r9", "sl", "fp", "ip", "sp", "lr"
      };
      int regno;
      int offset = strlen (name) - 2;

      for (regno = 0; regno <= 14; regno++)
        if (strcmp (&name[offset], table[regno]) == 0)
          return get_frame_register_unsigned (frame, regno);
    }

  /* GNU ld generates __foo_from_arm or __foo_from_thumb for
     non-interworking calls to foo.  We could decode the stubs
     to find the target but it's easier to use the symbol table.  */
  namelen = strlen (name);
  if (name[0] == '_' && name[1] == '_'
      && ((namelen > 2 + strlen ("_from_thumb")
           && startswith (name + namelen - strlen ("_from_thumb"),
                          "_from_thumb"))
          || (namelen > 2 + strlen ("_from_arm")
              && startswith (name + namelen - strlen ("_from_arm"),
                             "_from_arm"))))
    {
      char *target_name;
      int target_len = namelen - 2;
      struct bound_minimal_symbol minsym;
      struct objfile *objfile;
      struct obj_section *sec;

      if (name[namelen - 1] == 'b')
        target_len -= strlen ("_from_thumb");
      else
        target_len -= strlen ("_from_arm");

      target_name = (char *) alloca (target_len + 1);
      memcpy (target_name, name + 2, target_len);
      target_name[target_len] = '\0';

      sec = find_pc_section (pc);
      objfile = (sec == NULL) ? NULL : sec->objfile;
      minsym = lookup_minimal_symbol (target_name, NULL, objfile);
      if (minsym.minsym != NULL)
        return BMSYMBOL_VALUE_ADDRESS (minsym);
      else
        return 0;
    }

  return 0;			/* not a stub */
}
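/* Worked example (added for clarity, not in the original source): for a call
   routed through the linker stub "__foo_from_thumb", the code above strips
   the leading "__" and the "_from_thumb" suffix, looks up the minimal symbol
   "foo", and returns its address so GDB steps straight to the real callee.  */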
static void
set_arm_command (const char *args, int from_tty)
{
  printf_unfiltered (_("\
\"set arm\" must be followed by an appropriate subcommand.\n"));
  help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
}

static void
show_arm_command (const char *args, int from_tty)
{
  cmd_show_list (showarmcmdlist, from_tty, "");
}
static void
arm_update_current_architecture (void)
{
  struct gdbarch_info info;

  /* If the current architecture is not ARM, we have nothing to do.  */
  if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
    return;

  /* Update the architecture.  */
  gdbarch_info_init (&info);

  if (!gdbarch_update_p (info))
    internal_error (__FILE__, __LINE__, _("could not update architecture"));
}
static void
set_fp_model_sfunc (const char *args, int from_tty,
                    struct cmd_list_element *c)
{
  int fp_model;

  for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
    if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
      {
        arm_fp_model = (enum arm_float_model) fp_model;
        break;
      }

  if (fp_model == ARM_FLOAT_LAST)
    internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
                    current_fp_model);

  arm_update_current_architecture ();
}

static void
show_fp_model (struct ui_file *file, int from_tty,
               struct cmd_list_element *c, const char *value)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());

  if (arm_fp_model == ARM_FLOAT_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
                      fp_model_strings[tdep->fp_model]);
  else
    fprintf_filtered (file, _("\
The current ARM floating point model is \"%s\".\n"),
                      fp_model_strings[arm_fp_model]);
}
static void
arm_set_abi (const char *args, int from_tty,
             struct cmd_list_element *c)
{
  int arm_abi;

  for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
    if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
      {
        arm_abi_global = (enum arm_abi_kind) arm_abi;
        break;
      }

  if (arm_abi == ARM_ABI_LAST)
    internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
                    arm_abi_string);

  arm_update_current_architecture ();
}

static void
arm_show_abi (struct ui_file *file, int from_tty,
              struct cmd_list_element *c, const char *value)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());

  if (arm_abi_global == ARM_ABI_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM ABI is \"auto\" (currently \"%s\").\n"),
                      arm_abi_strings[tdep->arm_abi]);
  else
    fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
                      arm_abi_string);
}
static void
arm_show_fallback_mode (struct ui_file *file, int from_tty,
                        struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
                    _("The current execution mode assumed "
                      "(when symbols are unavailable) is \"%s\".\n"),
                    arm_fallback_mode_string);
}

static void
arm_show_force_mode (struct ui_file *file, int from_tty,
                     struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
                    _("The current execution mode assumed "
                      "(even when symbols are available) is \"%s\".\n"),
                    arm_force_mode_string);
}
/* If the user changes the register disassembly style used for info
   register and other commands, we have to also switch the style used
   in opcodes for disassembly output.  This function is run in the "set
   arm disassembly" command, and does that.  */

static void
set_disassembly_style_sfunc (const char *args, int from_tty,
                             struct cmd_list_element *c)
{
  /* Convert the short style name into the long style name (eg, reg-names-*)
     before calling the generic set_disassembler_options() function.  */
  std::string long_name = std::string ("reg-names-") + disassembly_style;
  set_disassembler_options (&long_name[0]);
}

static void
show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
                              struct cmd_list_element *c, const char *value)
{
  struct gdbarch *gdbarch = get_current_arch ();
  char *options = get_disassembler_options (gdbarch);
  const char *style = "";
  int len = 0;
  const char *opt;

  FOR_EACH_DISASSEMBLER_OPTION (opt, options)
    if (CONST_STRNEQ (opt, "reg-names-"))
      {
        style = &opt[strlen ("reg-names-")];
        len = strcspn (style, ",");
      }

  fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
}
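/* Example (added note): "set arm disassembler gcc" stores "gcc" in
   disassembly_style; the set function above turns that into the opcodes
   option "reg-names-gcc", and the show function parses the style name back
   out of the current option string for display.  */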
/* Return the ARM register name corresponding to register I.  */
static const char *
arm_register_name (struct gdbarch *gdbarch, int i)
{
  const int num_regs = gdbarch_num_regs (gdbarch);

  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && i >= num_regs && i < num_regs + 32)
    {
      static const char *const vfp_pseudo_names[] = {
        "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
        "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
        "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
        "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
      };

      return vfp_pseudo_names[i - num_regs];
    }

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && i >= num_regs + 32 && i < num_regs + 32 + 16)
    {
      static const char *const neon_pseudo_names[] = {
        "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
        "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
      };

      return neon_pseudo_names[i - num_regs - 32];
    }

  if (i >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return "";

  return arm_register_names[i];
}
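/* Numbering sketch (added note): with num_regs raw registers, pseudo
   register num_regs + 5 is named "s5" and num_regs + 32 + 2 is "q2"; any
   other register past the static name table is XML-only and gets the
   empty name.  */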
/* Test whether the coff symbol specific value corresponds to a Thumb
   function.  */

static int
coff_sym_is_thumb (int val)
{
  return (val == C_THUMBEXT
          || val == C_THUMBSTAT
          || val == C_THUMBEXTFUNC
          || val == C_THUMBSTATFUNC
          || val == C_THUMBLABEL);
}

/* arm_coff_make_msymbol_special()
   arm_elf_make_msymbol_special()

   These functions test whether the COFF or ELF symbol corresponds to
   an address in thumb code, and set a "special" bit in a minimal
   symbol to indicate that it does.  */

static void
arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
{
  elf_symbol_type *elfsym = (elf_symbol_type *) sym;

  if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
      == ST_BRANCH_TO_THUMB)
    MSYMBOL_SET_SPECIAL (msym);
}

static void
arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
{
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
static void
arm_objfile_data_free (struct objfile *objfile, void *arg)
{
  struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
  unsigned int i;

  for (i = 0; i < objfile->obfd->section_count; i++)
    VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
}
static void
arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
                           asymbol *sym)
{
  const char *name = bfd_asymbol_name (sym);
  struct arm_per_objfile *data;
  VEC(arm_mapping_symbol_s) **map_p;
  struct arm_mapping_symbol new_map_sym;

  gdb_assert (name[0] == '$');
  if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
    return;

  data = (struct arm_per_objfile *) objfile_data (objfile,
                                                  arm_objfile_data_key);
  if (data == NULL)
    {
      data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
                             struct arm_per_objfile);
      set_objfile_data (objfile, arm_objfile_data_key, data);
      data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
                                           objfile->obfd->section_count,
                                           VEC(arm_mapping_symbol_s) *);
    }
  map_p = &data->section_maps[bfd_get_section (sym)->index];

  new_map_sym.value = sym->value;
  new_map_sym.type = name[1];

  /* Assume that most mapping symbols appear in order of increasing
     value.  If they were randomly distributed, it would be faster to
     always push here and then sort at first use.  */
  if (!VEC_empty (arm_mapping_symbol_s, *map_p))
    {
      struct arm_mapping_symbol *prev_map_sym;

      prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
      if (prev_map_sym->value >= sym->value)
        {
          unsigned int idx;

          idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
                                 arm_compare_mapping_symbols);
          VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
          return;
        }
    }

  VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
}
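/* Example (added note): for a section containing the mapping symbols "$a"
   at offset 0x0, "$d" at 0x100 and "$t" at 0x120, the per-section vector
   ends up holding {0x0,'a'}, {0x100,'d'}, {0x120,'t'}, which later lets
   the prologue scanner tell ARM code, literal pools and Thumb code apart.  */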
static void
arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
{
  struct gdbarch *gdbarch = regcache->arch ();

  regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);

  /* If necessary, set the T bit.  */
  if (arm_apcs_32)
    {
      ULONGEST val, t_bit;

      regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
      t_bit = arm_psr_thumb_bit (gdbarch);
      if (arm_pc_is_thumb (gdbarch, pc))
        regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
                                        val | t_bit);
      else
        regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
                                        val & ~t_bit);
    }
}
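/* Note (added): arm_psr_thumb_bit yields the CPSR T flag (bit 5) for
   A/R-profile and the XPSR T flag (bit 24) for M-profile, so writing a
   Thumb address here also flips the correct Thumb bit in the status
   register.  */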
/* Read the contents of a NEON quad register, by reading from two
   double registers.  This is used to implement the quad pseudo
   registers, and for argument passing in case the quad registers are
   missing; vectors are passed in quad registers when using the VFP
   ABI, even if a NEON unit is not present.  REGNUM is the index of
   the quad register, in [0, 15].  */

static enum register_status
arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
                    int regnum, gdb_byte *buf)
{
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;
  enum register_status status;

  xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
  double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
                                               strlen (name_buf));

  /* d0 is always the least significant half of q0.  */
  if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
    offset = 8;
  else
    offset = 0;

  status = regcache->raw_read (double_regnum, reg_buf);
  if (status != REG_VALID)
    return status;
  memcpy (buf + offset, reg_buf, 8);

  offset = 8 - offset;
  status = regcache->raw_read (double_regnum + 1, reg_buf);
  if (status != REG_VALID)
    return status;
  memcpy (buf + offset, reg_buf, 8);

  return REG_VALID;
}
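/* Layout sketch (added note): for q1 on a little-endian target the code
   above reads d2 into buf[0..7] and d3 into buf[8..15]; on a big-endian
   target the two halves land in the opposite order, since d2 is still the
   least significant half of q1.  */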
static enum register_status
arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
                 int regnum, gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
  else
    {
      enum register_status status;

      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
        offset = (regnum & 1) ? 0 : 4;
      else
        offset = (regnum & 1) ? 4 : 0;

      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
                                                   strlen (name_buf));

      status = regcache->raw_read (double_regnum, reg_buf);
      if (status == REG_VALID)
        memcpy (buf, reg_buf + offset, 4);
      return status;
    }
}
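/* Mapping sketch (added note): pseudo "s5" (regnum 5 after the adjustment
   above) is the odd half of d2, so on a little-endian target it is bytes
   4..7 of the d2 raw value, and bytes 0..3 on big-endian.  */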
/* Store the contents of BUF to a NEON quad register, by writing to
   two double registers.  This is used to implement the quad pseudo
   registers, and for argument passing in case the quad registers are
   missing; vectors are passed in quad registers when using the VFP
   ABI, even if a NEON unit is not present.  REGNUM is the index
   of the quad register, in [0, 15].  */

static void
arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
                     int regnum, const gdb_byte *buf)
{
  char name_buf[4];
  int offset, double_regnum;

  xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
  double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
                                               strlen (name_buf));

  /* d0 is always the least significant half of q0.  */
  if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
    offset = 8;
  else
    offset = 0;

  regcache->raw_write (double_regnum, buf + offset);
  offset = 8 - offset;
  regcache->raw_write (double_regnum + 1, buf + offset);
}
static void
arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
                  int regnum, const gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
  else
    {
      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
        offset = (regnum & 1) ? 0 : 4;
      else
        offset = (regnum & 1) ? 4 : 0;

      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
                                                   strlen (name_buf));

      regcache->raw_read (double_regnum, reg_buf);
      memcpy (reg_buf + offset, buf, 4);
      regcache->raw_write (double_regnum, reg_buf);
    }
}
static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  const int *reg_p = (const int *) baton;

  return value_of_register (*reg_p, frame);
}
static enum gdb_osabi
arm_elf_osabi_sniffer (bfd *abfd)
{
  unsigned int elfosabi;
  enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;

  elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];

  if (elfosabi == ELFOSABI_ARM)
    /* GNU tools use this value.  Check note sections in this case,
       as well.  */
    bfd_map_over_sections (abfd,
                           generic_elf_osabi_sniff_abi_tag_sections,
                           &osabi);

  /* Anything else will be handled by the generic ELF sniffer.  */
  return osabi;
}
static int
arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
                         struct reggroup *group)
{
  /* FPS register's type is INT, but belongs to float_reggroup.  Beside
     this, FPS register belongs to save_reggroup, restore_reggroup, and
     all_reggroup, of course.  */
  if (regnum == ARM_FPS_REGNUM)
    return (group == float_reggroup
            || group == save_reggroup
            || group == restore_reggroup
            || group == all_reggroup);
  else
    return default_register_reggroup_p (gdbarch, regnum, group);
}
/* For backward-compatibility we allow two 'g' packet lengths with
   the remote protocol depending on whether FPA registers are
   supplied.  M-profile targets do not have FPA registers, but some
   stubs already exist in the wild which use a 'g' packet which
   supplies them albeit with dummy values.  The packet format which
   includes FPA registers should be considered deprecated for
   M-profile targets.  */

static void
arm_register_g_packet_guesses (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    {
      /* If we know from the executable this is an M-profile target,
         cater for remote targets whose register set layout is the
         same as the FPA layout.  */
      register_remote_g_packet_guess (gdbarch,
                                      /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
                                      (16 * INT_REGISTER_SIZE)
                                      + (8 * FP_REGISTER_SIZE)
                                      + (2 * INT_REGISTER_SIZE),
                                      tdesc_arm_with_m_fpa_layout);

      /* The regular M-profile layout.  */
      register_remote_g_packet_guess (gdbarch,
                                      /* r0-r12,sp,lr,pc; xpsr */
                                      (16 * INT_REGISTER_SIZE)
                                      + INT_REGISTER_SIZE,
                                      tdesc_arm_with_m);

      /* M-profile plus M4F VFP.  */
      register_remote_g_packet_guess (gdbarch,
                                      /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
                                      (16 * INT_REGISTER_SIZE)
                                      + (16 * VFP_REGISTER_SIZE)
                                      + (2 * INT_REGISTER_SIZE),
                                      tdesc_arm_with_m_vfp_d16);
    }

  /* Otherwise we don't have a useful guess.  */
}
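/* Worked sizes for the guesses above (added note, assuming
   INT_REGISTER_SIZE == 4, FP_REGISTER_SIZE == 12 and
   VFP_REGISTER_SIZE == 8):
     FPA-layout guess   16*4 + 8*12 + 2*4 = 168 bytes
     plain M-profile    16*4 + 4          =  68 bytes
     M-profile + VFP    16*4 + 16*8 + 2*4 = 200 bytes  */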
/* Implement the code_of_frame_writable gdbarch method.  */

static int
arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
{
  if (gdbarch_tdep (gdbarch)->is_m
      && get_frame_type (frame) == SIGTRAMP_FRAME)
    {
      /* M-profile exception frames return to some magic PCs, which
         aren't writable at all.  */
      return 0;
    }
  else
    return 1;
}
/* Initialize the current architecture based on INFO.  If possible,
   re-use an architecture from ARCHES, which is a list of
   architectures already created during this debugging session.

   Called e.g. at program startup, when reading a core file, and when
   reading a binary file.  */

static struct gdbarch *
arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
{
  struct gdbarch_tdep *tdep;
  struct gdbarch *gdbarch;
  struct gdbarch_list *best_arch;
  enum arm_abi_kind arm_abi = arm_abi_global;
  enum arm_float_model fp_model = arm_fp_model;
  struct tdesc_arch_data *tdesc_data = NULL;
  int i, is_m = 0;
  int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
  int have_wmmx_registers = 0;
  int have_neon = 0;
  int have_fpa_registers = 1;
  const struct target_desc *tdesc = info.target_desc;

  /* If we have an object to base this architecture on, try to determine
     its ABI.  */

  if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
    {
      int ei_osabi, e_flags;

      switch (bfd_get_flavour (info.abfd))
        {
        case bfd_target_coff_flavour:
          /* Assume it's an old APCS-style ABI.  */
          arm_abi = ARM_ABI_APCS;
          break;

        case bfd_target_elf_flavour:
          ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
          e_flags = elf_elfheader (info.abfd)->e_flags;

          if (ei_osabi == ELFOSABI_ARM)
            {
              /* GNU tools used to use this value, but do not for EABI
                 objects.  There's nowhere to tag an EABI version
                 anyway, so assume APCS.  */
              arm_abi = ARM_ABI_APCS;
            }
          else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
            {
              int eabi_ver = EF_ARM_EABI_VERSION (e_flags);

              switch (eabi_ver)
                {
                case EF_ARM_EABI_UNKNOWN:
                  /* Assume GNU tools.  */
                  arm_abi = ARM_ABI_APCS;
                  break;

                case EF_ARM_EABI_VER4:
                case EF_ARM_EABI_VER5:
                  arm_abi = ARM_ABI_AAPCS;
                  /* EABI binaries default to VFP float ordering.
                     They may also contain build attributes that can
                     be used to identify if the VFP argument-passing
                     ABI is in use.  */
                  if (fp_model == ARM_FLOAT_AUTO)
                    {
#ifdef HAVE_ELF
                      switch (bfd_elf_get_obj_attr_int (info.abfd,
                                                        OBJ_ATTR_PROC,
                                                        Tag_ABI_VFP_args))
                        {
                        case AEABI_VFP_args_base:
                          /* "The user intended FP parameter/result
                             passing to conform to AAPCS, base
                             variant".  */
                          fp_model = ARM_FLOAT_SOFT_VFP;
                          break;

                        case AEABI_VFP_args_vfp:
                          /* "The user intended FP parameter/result
                             passing to conform to AAPCS, VFP
                             variant".  */
                          fp_model = ARM_FLOAT_VFP;
                          break;

                        case AEABI_VFP_args_toolchain:
                          /* "The user intended FP parameter/result
                             passing to conform to tool chain-specific
                             conventions" - we don't know any such
                             conventions, so leave it as "auto".  */
                          break;

                        case AEABI_VFP_args_compatible:
                          /* "Code is compatible with both the base
                             and VFP variants; the user did not permit
                             non-variadic functions to pass FP
                             parameters/results" - leave it as
                             "auto".  */
                          break;

                        default:
                          /* Attribute value not mentioned in the
                             November 2012 ABI, so leave it as
                             "auto".  */
                          break;
                        }
#else
                      fp_model = ARM_FLOAT_SOFT_VFP;
#endif
                    }
                  break;

                default:
                  /* Leave it as "auto".  */
                  warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
                  break;
                }

#ifdef HAVE_ELF
              /* Detect M-profile programs.  This only works if the
                 executable file includes build attributes; GCC does
                 copy them to the executable, but e.g. RealView does
                 not.  */
              int attr_arch
                = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
                                            Tag_CPU_arch);
              int attr_profile
                = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
                                            Tag_CPU_arch_profile);

              /* GCC specifies the profile for v6-M; RealView only
                 specifies the profile for architectures starting with
                 V7 (as opposed to architectures with a tag
                 numerically greater than TAG_CPU_ARCH_V7).  */
              if (!tdesc_has_registers (tdesc)
                  && (attr_arch == TAG_CPU_ARCH_V6_M
                      || attr_arch == TAG_CPU_ARCH_V6S_M
                      || attr_profile == 'M'))
                is_m = 1;
#endif
            }

          if (fp_model == ARM_FLOAT_AUTO)
            {
              switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
                {
                case 0:
                  /* Leave it as "auto".  Strictly speaking this case
                     means FPA, but almost nobody uses that now, and
                     many toolchains fail to set the appropriate bits
                     for the floating-point model they use.  */
                  break;
                case EF_ARM_SOFT_FLOAT:
                  fp_model = ARM_FLOAT_SOFT_FPA;
                  break;
                case EF_ARM_VFP_FLOAT:
                  fp_model = ARM_FLOAT_VFP;
                  break;
                case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
                  fp_model = ARM_FLOAT_SOFT_VFP;
                  break;
                }
            }

          if (e_flags & EF_ARM_BE8)
            info.byte_order_for_code = BFD_ENDIAN_LITTLE;

          break;

        default:
          /* Leave it as "auto".  */
          break;
        }
    }

  /* Check any target description for validity.  */
  if (tdesc_has_registers (tdesc))
    {
      /* For most registers we require GDB's default names; but also allow
         the numeric names for sp / lr / pc, as a convenience.  */
      static const char *const arm_sp_names[] = { "r13", "sp", NULL };
      static const char *const arm_lr_names[] = { "r14", "lr", NULL };
      static const char *const arm_pc_names[] = { "r15", "pc", NULL };

      const struct tdesc_feature *feature;
      int valid_p;

      feature = tdesc_find_feature (tdesc,
                                    "org.gnu.gdb.arm.core");
      if (feature == NULL)
        {
          feature = tdesc_find_feature (tdesc,
                                        "org.gnu.gdb.arm.m-profile");
          if (feature == NULL)
            return NULL;
          else
            is_m = 1;
        }

      tdesc_data = tdesc_data_alloc ();

      valid_p = 1;
      for (i = 0; i < ARM_SP_REGNUM; i++)
        valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
                                            arm_register_names[i]);
      valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
                                                  ARM_SP_REGNUM,
                                                  arm_sp_names);
      valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
                                                  ARM_LR_REGNUM,
                                                  arm_lr_names);
      valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
                                                  ARM_PC_REGNUM,
                                                  arm_pc_names);
      if (is_m)
        valid_p &= tdesc_numbered_register (feature, tdesc_data,
                                            ARM_PS_REGNUM, "xpsr");
      else
        valid_p &= tdesc_numbered_register (feature, tdesc_data,
                                            ARM_PS_REGNUM, "cpsr");

      if (!valid_p)
        {
          tdesc_data_cleanup (tdesc_data);
          return NULL;
        }

      feature = tdesc_find_feature (tdesc,
                                    "org.gnu.gdb.arm.fpa");
      if (feature != NULL)
        {
          valid_p = 1;
          for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
            valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
                                                arm_register_names[i]);
          if (!valid_p)
            {
              tdesc_data_cleanup (tdesc_data);
              return NULL;
            }
        }
      else
        have_fpa_registers = 0;

      feature = tdesc_find_feature (tdesc,
                                    "org.gnu.gdb.xscale.iwmmxt");
      if (feature != NULL)
        {
          static const char *const iwmmxt_names[] = {
            "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
            "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
            "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
            "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
          };

          valid_p = 1;
          for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
            valid_p
              &= tdesc_numbered_register (feature, tdesc_data, i,
                                          iwmmxt_names[i - ARM_WR0_REGNUM]);

          /* Check for the control registers, but do not fail if they
             are missing.  */
          for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
            tdesc_numbered_register (feature, tdesc_data, i,
                                     iwmmxt_names[i - ARM_WR0_REGNUM]);

          for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
            valid_p
              &= tdesc_numbered_register (feature, tdesc_data, i,
                                          iwmmxt_names[i - ARM_WR0_REGNUM]);

          if (!valid_p)
            {
              tdesc_data_cleanup (tdesc_data);
              return NULL;
            }

          have_wmmx_registers = 1;
        }

      /* If we have a VFP unit, check whether the single precision registers
         are present.  If not, then we will synthesize them as pseudo
         registers.  */
      feature = tdesc_find_feature (tdesc,
                                    "org.gnu.gdb.arm.vfp");
      if (feature != NULL)
        {
          static const char *const vfp_double_names[] = {
            "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
            "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
            "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
            "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          };

          /* Require the double precision registers.  There must be either
             16 or 32.  */
          valid_p = 1;
          for (i = 0; i < 32; i++)
            {
              valid_p &= tdesc_numbered_register (feature, tdesc_data,
                                                  ARM_D0_REGNUM + i,
                                                  vfp_double_names[i]);
              if (!valid_p)
                break;
            }
          if (!valid_p && i == 16)
            valid_p = 1;

          /* Also require FPSCR.  */
          valid_p &= tdesc_numbered_register (feature, tdesc_data,
                                              ARM_FPSCR_REGNUM, "fpscr");
          if (!valid_p)
            {
              tdesc_data_cleanup (tdesc_data);
              return NULL;
            }

          if (tdesc_unnumbered_register (feature, "s0") == 0)
            have_vfp_pseudos = 1;

          vfp_register_count = i;

          /* If we have VFP, also check for NEON.  The architecture allows
             NEON without VFP (integer vector operations only), but GDB
             does not support that.  */
          feature = tdesc_find_feature (tdesc,
                                        "org.gnu.gdb.arm.neon");
          if (feature != NULL)
            {
              /* NEON requires 32 double-precision registers.  */
              if (i != 32)
                {
                  tdesc_data_cleanup (tdesc_data);
                  return NULL;
                }

              /* If there are quad registers defined by the stub, use
                 their type; otherwise (normally) provide them with
                 the default type.  */
              if (tdesc_unnumbered_register (feature, "q0") == 0)
                have_neon_pseudos = 1;

              have_neon = 1;
            }
        }
    }

  /* If there is already a candidate, use it.  */
  for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
       best_arch != NULL;
       best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
    {
      if (arm_abi != ARM_ABI_AUTO
          && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
        continue;

      if (fp_model != ARM_FLOAT_AUTO
          && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
        continue;

      /* There are various other properties in tdep that we do not
         need to check here: those derived from a target description,
         since gdbarches with a different target description are
         automatically disqualified.  */

      /* Do check is_m, though, since it might come from the binary.  */
      if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
        continue;

      /* Found a match.  */
      break;
    }

  if (best_arch != NULL)
    {
      if (tdesc_data != NULL)
        tdesc_data_cleanup (tdesc_data);
      return best_arch->gdbarch;
    }

  tdep = XCNEW (struct gdbarch_tdep);
  gdbarch = gdbarch_alloc (&info, tdep);

  /* Record additional information about the architecture we are defining.
     These are gdbarch discriminators, like the OSABI.  */
  tdep->arm_abi = arm_abi;
  tdep->fp_model = fp_model;
  tdep->is_m = is_m;
  tdep->have_fpa_registers = have_fpa_registers;
  tdep->have_wmmx_registers = have_wmmx_registers;
  gdb_assert (vfp_register_count == 0
              || vfp_register_count == 16
              || vfp_register_count == 32);
  tdep->vfp_register_count = vfp_register_count;
  tdep->have_vfp_pseudos = have_vfp_pseudos;
  tdep->have_neon_pseudos = have_neon_pseudos;
  tdep->have_neon = have_neon;

  arm_register_g_packet_guesses (gdbarch);

  /* Breakpoints.  */
  switch (info.byte_order_for_code)
    {
    case BFD_ENDIAN_BIG:
      tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
      tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
      tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
      tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
      break;

    case BFD_ENDIAN_LITTLE:
      tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
      tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
      tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
      tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
      break;

    default:
      internal_error (__FILE__, __LINE__,
                      _("arm_gdbarch_init: bad byte order for float format"));
    }

  /* On ARM targets char defaults to unsigned.  */
  set_gdbarch_char_signed (gdbarch, 0);

  /* wchar_t is unsigned under the AAPCS.  */
  if (tdep->arm_abi == ARM_ABI_AAPCS)
    set_gdbarch_wchar_signed (gdbarch, 0);
  else
    set_gdbarch_wchar_signed (gdbarch, 1);

  /* Note: for displaced stepping, this includes the breakpoint, and one word
     of additional scratch space.  This setting isn't used for anything beside
     displaced stepping at present.  */
  set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);

  /* This should be low enough for everything.  */
  tdep->lowest_pc = 0x20;
  tdep->jb_pc = -1;	/* Longjump support not enabled by default.  */

  /* The default, for both APCS and AAPCS, is to return small
     structures in registers.  */
  tdep->struct_return = reg_struct_return;

  set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
  set_gdbarch_frame_align (gdbarch, arm_frame_align);

  if (is_m)
    set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);

  set_gdbarch_write_pc (gdbarch, arm_write_pc);

  /* Frame handling.  */
  set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
  set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
  set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);

  frame_base_set_default (gdbarch, &arm_normal_base);

  /* Address manipulation.  */
  set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);

  /* Advance PC across function entry code.  */
  set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);

  /* Detect whether PC is at a point where the stack has been destroyed.  */
  set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);

  /* Skip trampolines.  */
  set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);

  /* The stack grows downward.  */
  set_gdbarch_inner_than (gdbarch, core_addr_lessthan);

  /* Breakpoint manipulation.  */
  set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
  set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
  set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
                                                  arm_breakpoint_kind_from_current_state);

  /* Information about registers, etc.  */
  set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
  set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
  set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
  set_gdbarch_register_type (gdbarch, arm_register_type);
  set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);

  /* This "info float" is FPA-specific.  Use the generic version if we
     do not have FPA.  */
  if (gdbarch_tdep (gdbarch)->have_fpa_registers)
    set_gdbarch_print_float_info (gdbarch, arm_print_float_info);

  /* Internal <-> external register number maps.  */
  set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
  set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);

  set_gdbarch_register_name (gdbarch, arm_register_name);

  /* Returning results.  */
  set_gdbarch_return_value (gdbarch, arm_return_value);

  /* Disassembly.  */
  set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);

  /* Minsymbol frobbing.  */
  set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
  set_gdbarch_coff_make_msymbol_special (gdbarch,
                                         arm_coff_make_msymbol_special);
  set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);

  /* Thumb-2 IT block support.  */
  set_gdbarch_adjust_breakpoint_address (gdbarch,
                                         arm_adjust_breakpoint_address);

  /* Virtual tables.  */
  set_gdbarch_vbit_in_delta (gdbarch, 1);

  /* Hook in the ABI-specific overrides, if they have been registered.  */
  gdbarch_init_osabi (info, gdbarch);

  dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);

  /* Add some default predicates.  */
  if (is_m)
    frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
  frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
  dwarf2_append_unwinders (gdbarch);
  frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
  frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
  frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);

  /* Now we have tuned the configuration, set a few final things,
     based on what the OS ABI has told us.  */

  /* If the ABI is not otherwise marked, assume the old GNU APCS.  EABI
     binaries are always marked.  */
  if (tdep->arm_abi == ARM_ABI_AUTO)
    tdep->arm_abi = ARM_ABI_APCS;

  /* Watchpoints are not steppable.  */
  set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);

  /* We used to default to FPA for generic ARM, but almost nobody
     uses that now, and we now provide a way for the user to force
     the model.  So default to the most useful variant.  */
  if (tdep->fp_model == ARM_FLOAT_AUTO)
    tdep->fp_model = ARM_FLOAT_SOFT_FPA;

  if (tdep->jb_pc >= 0)
    set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);

  /* Floating point sizes and format.  */
  set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
  if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
    {
      set_gdbarch_double_format
	(gdbarch, floatformats_ieee_double_littlebyte_bigword);
      set_gdbarch_long_double_format
	(gdbarch, floatformats_ieee_double_littlebyte_bigword);
    }
  else
    {
      set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
      set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
    }

  if (have_vfp_pseudos)
    {
      /* NOTE: These are the only pseudo registers used by
         the ARM target at the moment.  If more are added, a
         little more care in numbering will be needed.  */

      int num_pseudos = 32;
      if (have_neon_pseudos)
        num_pseudos += 16;
      set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
      set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
      set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
    }

  if (tdesc_data)
    {
      set_tdesc_pseudo_register_name (gdbarch, arm_register_name);

      tdesc_use_registers (gdbarch, tdesc, tdesc_data);

      /* Override tdesc_register_type to adjust the types of VFP
         registers for NEON.  */
      set_gdbarch_register_type (gdbarch, arm_register_type);
    }

  /* Add standard register aliases.  We add aliases even for those
     names which are used by the current architecture - it's simpler,
     and does no harm, since nothing ever lists user registers.  */
  for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
    user_reg_add (gdbarch, arm_register_aliases[i].name,
                  value_of_arm_user_reg, &arm_register_aliases[i].regnum);

  set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
  set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());

  return gdbarch;
}
static void
arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep == NULL)
    return;

  fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
                      (unsigned long) tdep->lowest_pc);
}
#if GDB_SELF_TEST
namespace selftests
{
static void arm_record_test (void);
}
#endif

void
_initialize_arm_tdep (void)
{
  long length;
  int i, j;
  char regdesc[1024], *rdptr = regdesc;
  size_t rest = sizeof (regdesc);

  gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);

  arm_objfile_data_key
    = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);

  /* Add ourselves to objfile event chain.  */
  gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
  arm_exidx_data_key
    = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);

  /* Register an ELF OS ABI sniffer for ARM binaries.  */
  gdbarch_register_osabi_sniffer (bfd_arch_arm,
                                  bfd_target_elf_flavour,
                                  arm_elf_osabi_sniffer);

  /* Initialize the standard target descriptions.  */
  initialize_tdesc_arm_with_m ();
  initialize_tdesc_arm_with_m_fpa_layout ();
  initialize_tdesc_arm_with_m_vfp_d16 ();
  initialize_tdesc_arm_with_iwmmxt ();
  initialize_tdesc_arm_with_vfpv2 ();
  initialize_tdesc_arm_with_vfpv3 ();
  initialize_tdesc_arm_with_neon ();

  /* Add root prefix command for all "set arm"/"show arm" commands.  */
  add_prefix_cmd ("arm", no_class, set_arm_command,
                  _("Various ARM-specific commands."),
                  &setarmcmdlist, "set arm ", 0, &setlist);

  add_prefix_cmd ("arm", no_class, show_arm_command,
                  _("Various ARM-specific commands."),
                  &showarmcmdlist, "show arm ", 0, &showlist);

  arm_disassembler_options = xstrdup ("reg-names-std");
  const disasm_options_t *disasm_options
    = &disassembler_options_arm ()->options;
  int num_disassembly_styles = 0;
  for (i = 0; disasm_options->name[i] != NULL; i++)
    if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
      num_disassembly_styles++;

  /* Initialize the array that will be passed to add_setshow_enum_cmd().  */
  valid_disassembly_styles = XNEWVEC (const char *,
                                      num_disassembly_styles + 1);
  for (i = j = 0; disasm_options->name[i] != NULL; i++)
    if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
      {
        size_t offset = strlen ("reg-names-");
        const char *style = disasm_options->name[i];
        valid_disassembly_styles[j++] = &style[offset];
        length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
                           disasm_options->description[i]);
        rdptr += length;
        rest -= length;
      }
  /* Mark the end of valid options.  */
  valid_disassembly_styles[num_disassembly_styles] = NULL;

  /* Create the help text.  */
  std::string helptext = string_printf ("%s%s%s",
                                        _("The valid values are:\n"),
                                        regdesc,
                                        _("The default is \"std\"."));

  add_setshow_enum_cmd("disassembler", no_class,
                       valid_disassembly_styles, &disassembly_style,
                       _("Set the disassembly style."),
                       _("Show the disassembly style."),
                       helptext.c_str (),
                       set_disassembly_style_sfunc,
                       show_disassembly_style_sfunc,
                       &setarmcmdlist, &showarmcmdlist);

  add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
                           _("Set usage of ARM 32-bit mode."),
                           _("Show usage of ARM 32-bit mode."),
                           _("When off, a 26-bit PC will be used."),
                           NULL,
                           NULL, /* FIXME: i18n: Usage of ARM 32-bit
                                    mode is %s.  */
                           &setarmcmdlist, &showarmcmdlist);

  /* Add a command to allow the user to force the FPU model.  */
  add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
                        _("Set the floating point type."),
                        _("Show the floating point type."),
                        _("auto - Determine the FP type from the OS-ABI.\n\
softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
fpa - FPA co-processor (GCC compiled).\n\
softvfp - Software FP with pure-endian doubles.\n\
vfp - VFP co-processor."),
                        set_fp_model_sfunc, show_fp_model,
                        &setarmcmdlist, &showarmcmdlist);

  /* Add a command to allow the user to force the ABI.  */
  add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
                        _("Set the ABI."),
                        _("Show the ABI."),
                        NULL, arm_set_abi, arm_show_abi,
                        &setarmcmdlist, &showarmcmdlist);

  /* Add two commands to allow the user to force the assumed
     execution mode.  */
  add_setshow_enum_cmd ("fallback-mode", class_support,
                        arm_mode_strings, &arm_fallback_mode_string,
                        _("Set the mode assumed when symbols are unavailable."),
                        _("Show the mode assumed when symbols are unavailable."),
                        NULL, NULL, arm_show_fallback_mode,
                        &setarmcmdlist, &showarmcmdlist);
  add_setshow_enum_cmd ("force-mode", class_support,
                        arm_mode_strings, &arm_force_mode_string,
                        _("Set the mode assumed even when symbols are available."),
                        _("Show the mode assumed even when symbols are available."),
                        NULL, NULL, arm_show_force_mode,
                        &setarmcmdlist, &showarmcmdlist);

  /* Debugging flag.  */
  add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
                           _("Set ARM debugging."),
                           _("Show ARM debugging."),
                           _("When on, arm-specific debugging is enabled."),
                           NULL,
                           NULL, /* FIXME: i18n: "ARM debugging is %s.  */
                           &setdebuglist, &showdebuglist);

#if GDB_SELF_TEST
  selftests::register_test ("arm-record", selftests::arm_record_test);
#endif
}
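/* Usage examples for the commands registered above (added note):
     (gdb) set arm fpu vfp
     (gdb) set arm abi AAPCS
     (gdb) show arm fallback-mode
     (gdb) set debug arm on
   The fpu and abi settings funnel back through
   arm_update_current_architecture so the active gdbarch is rebuilt with
   the new discriminators.  */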
/* ARM-reversible process record data structures.  */

#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4

/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
              } \
          } \
        while (0)

#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
              { \
                MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
                memcpy(&MEMS->len, &RECORD_BUF[0], \
                       sizeof(struct arm_mem_r) * LENGTH); \
              } \
          } \
        while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
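/* Typical use of the helpers above (added sketch): a decoder fills local
   record_buf[] / record_buf_mem[] arrays and the per-record counters, then
   calls
     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
     MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
   so the caller can later save and restore those registers and memory
   ranges when replaying in reverse.  */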
/* ARM memory record structure.  */
struct arm_mem_r
{
  uint32_t len;    /* Record length.  */
  uint32_t addr;   /* Memory address.  */
};

/* ARM instruction record contains opcode of current insn
   and execution state (before entry to decode_insn()),
   contains list of to-be-modified registers and
   memory blocks (on return from decode_insn()).  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;
  struct regcache *regcache;
  CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
  uint32_t arm_insn;            /* Should accommodate thumb.  */
  uint32_t cond;                /* Condition code.  */
  uint32_t opcode;              /* Insn opcode.  */
  uint32_t decode;              /* Insn decode bits.  */
  uint32_t mem_rec_count;       /* No of mem records.  */
  uint32_t reg_rec_count;       /* No of reg records.  */
  uint32_t *arm_regs;           /* Registers to be saved for this record.  */
  struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
} insn_decode_record;
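/* Flow note (added): the process-record entry point fills gdbarch,
   regcache, this_addr and arm_insn, hands the record to a decoder, and on
   success reads back arm_regs/arm_mems to build the undo log; the two
   count fields say how many slots of each array are valid.  */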
/* Checks ARM SBZ and SBO mandatory fields.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));

  if (!len)
    return 1;

  if (!sbo)
    ones = ~ones;

  while (ones)
    {
      if (!(ones & sbo))
        return 0;
      ones = ones >> 1;
    }

  return 1;
}

enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

typedef enum
{
  ARM_RECORD_STRH = 1,
  ARM_RECORD_STRD
} arm_record_strx_t;
*arm_insn_r
, uint32_t *record_buf
,
9785 uint32_t *record_buf_mem
, arm_record_strx_t str_type
)
9788 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
9789 ULONGEST u_regval
[2]= {0};
9791 uint32_t reg_src1
= 0, reg_src2
= 0;
9792 uint32_t immed_high
= 0, immed_low
= 0,offset_8
= 0, tgt_mem_addr
= 0;
9794 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
9795 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
9797 if (14 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
9799 /* 1) Handle misc store, immediate offset. */
9800 immed_low
= bits (arm_insn_r
->arm_insn
, 0, 3);
9801 immed_high
= bits (arm_insn_r
->arm_insn
, 8, 11);
9802 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
9803 regcache_raw_read_unsigned (reg_cache
, reg_src1
,
9805 if (ARM_PC_REGNUM
== reg_src1
)
9807 /* If R15 was used as Rn, hence current PC+8. */
9808 u_regval
[0] = u_regval
[0] + 8;
9810 offset_8
= (immed_high
<< 4) | immed_low
;
9811 /* Calculate target store address. */
9812 if (14 == arm_insn_r
->opcode
)
9814 tgt_mem_addr
= u_regval
[0] + offset_8
;
9818 tgt_mem_addr
= u_regval
[0] - offset_8
;
9820 if (ARM_RECORD_STRH
== str_type
)
9822 record_buf_mem
[0] = 2;
9823 record_buf_mem
[1] = tgt_mem_addr
;
9824 arm_insn_r
->mem_rec_count
= 1;
9826 else if (ARM_RECORD_STRD
== str_type
)
9828 record_buf_mem
[0] = 4;
9829 record_buf_mem
[1] = tgt_mem_addr
;
9830 record_buf_mem
[2] = 4;
9831 record_buf_mem
[3] = tgt_mem_addr
+ 4;
9832 arm_insn_r
->mem_rec_count
= 2;
9835 else if (12 == arm_insn_r
->opcode
|| 8 == arm_insn_r
->opcode
)
9837 /* 2) Store, register offset. */
9839 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
9841 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
9842 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
9843 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
9846 /* If R15 was used as Rn, hence current PC+8. */
9847 u_regval
[0] = u_regval
[0] + 8;
9849 /* Calculate target store address, Rn +/- Rm, register offset. */
9850 if (12 == arm_insn_r
->opcode
)
9852 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
9856 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
9858 if (ARM_RECORD_STRH
== str_type
)
9860 record_buf_mem
[0] = 2;
9861 record_buf_mem
[1] = tgt_mem_addr
;
9862 arm_insn_r
->mem_rec_count
= 1;
9864 else if (ARM_RECORD_STRD
== str_type
)
9866 record_buf_mem
[0] = 4;
9867 record_buf_mem
[1] = tgt_mem_addr
;
9868 record_buf_mem
[2] = 4;
9869 record_buf_mem
[3] = tgt_mem_addr
+ 4;
9870 arm_insn_r
->mem_rec_count
= 2;
9873 else if (11 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
9874 || 2 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
)
9876 /* 3) Store, immediate pre-indexed. */
9877 /* 5) Store, immediate post-indexed. */
9878 immed_low
= bits (arm_insn_r
->arm_insn
, 0, 3);
9879 immed_high
= bits (arm_insn_r
->arm_insn
, 8, 11);
9880 offset_8
= (immed_high
<< 4) | immed_low
;
9881 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
9882 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
9883 /* Calculate target store address, Rn +/- Rm, register offset. */
9884 if (15 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
)
9886 tgt_mem_addr
= u_regval
[0] + offset_8
;
9890 tgt_mem_addr
= u_regval
[0] - offset_8
;
9892 if (ARM_RECORD_STRH
== str_type
)
9894 record_buf_mem
[0] = 2;
9895 record_buf_mem
[1] = tgt_mem_addr
;
9896 arm_insn_r
->mem_rec_count
= 1;
9898 else if (ARM_RECORD_STRD
== str_type
)
9900 record_buf_mem
[0] = 4;
9901 record_buf_mem
[1] = tgt_mem_addr
;
9902 record_buf_mem
[2] = 4;
9903 record_buf_mem
[3] = tgt_mem_addr
+ 4;
9904 arm_insn_r
->mem_rec_count
= 2;
9906 /* Record Rn also as it changes. */
9907 *(record_buf
) = bits (arm_insn_r
->arm_insn
, 16, 19);
9908 arm_insn_r
->reg_rec_count
= 1;
9910 else if (9 == arm_insn_r
->opcode
|| 13 == arm_insn_r
->opcode
9911 || 0 == arm_insn_r
->opcode
|| 4 == arm_insn_r
->opcode
)
9913 /* 4) Store, register pre-indexed. */
9914 /* 6) Store, register post -indexed. */
9915 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
9916 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
9917 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
9918 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
9919 /* Calculate target store address, Rn +/- Rm, register offset. */
9920 if (13 == arm_insn_r
->opcode
|| 4 == arm_insn_r
->opcode
)
9922 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
9926 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
9928 if (ARM_RECORD_STRH
== str_type
)
9930 record_buf_mem
[0] = 2;
9931 record_buf_mem
[1] = tgt_mem_addr
;
9932 arm_insn_r
->mem_rec_count
= 1;
9934 else if (ARM_RECORD_STRD
== str_type
)
9936 record_buf_mem
[0] = 4;
9937 record_buf_mem
[1] = tgt_mem_addr
;
9938 record_buf_mem
[2] = 4;
9939 record_buf_mem
[3] = tgt_mem_addr
+ 4;
9940 arm_insn_r
->mem_rec_count
= 2;
9942 /* Record Rn also as it changes. */
9943 *(record_buf
) = bits (arm_insn_r
->arm_insn
, 16, 19);
9944 arm_insn_r
->reg_rec_count
= 1;
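/* Recording summary (added note): for STRH one {length 2, address} pair is
   queued; for STRD two {length 4} pairs at addr and addr + 4; the pre- and
   post-indexed forms additionally queue Rn itself, since write-back also
   changes the base register.  */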
/* Handling ARM extension space insns.  */

static int
arm_record_extension_space (insn_decode_record *arm_insn_r)
{
  int ret = 0;  /* Return value: -1:record failure ;  0:success  */
  uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t reg_src1 = 0;
  struct regcache *reg_cache = arm_insn_r->regcache;
  ULONGEST u_regval = 0;

  gdb_assert (!INSN_RECORDED(arm_insn_r));
  /* Handle unconditional insn extension space.  */

  opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
  if (arm_insn_r->cond)
    {
      /* PLD has no affect on architectural state, it just affects
         the caches.  */
      if (5 == ((opcode1 & 0xE0) >> 5))
        {
          /* BLX(1).  */
          record_buf[0] = ARM_PS_REGNUM;
          record_buf[1] = ARM_LR_REGNUM;
          arm_insn_r->reg_rec_count = 2;
        }
      /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn.  */
    }

  opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
  if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
    {
      ret = -1;
      /* Undefined instruction on ARM V5; need to handle if later
         versions define it.  */
    }

  opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
  insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);

  /* Handle arithmetic insn extension space.  */
  if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
      && !INSN_RECORDED(arm_insn_r))
    {
      /* Handle MLA(S) and MUL(S).  */
      if (in_inclusive_range (insn_op1, 0U, 3U))
        {
          record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
          record_buf[1] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 2;
        }
      else if (in_inclusive_range (insn_op1, 4U, 15U))
        {
          /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S).  */
          record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
          record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
          record_buf[2] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 3;
        }
    }

  opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
  insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);

  /* Handle control insn extension space.  */

  if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
      && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
    {
      if (!bit (arm_insn_r->arm_insn, 25))
        {
          if (!bits (arm_insn_r->arm_insn, 4, 7))
            {
              if ((0 == insn_op1) || (2 == insn_op1))
                {
                  /* MRS.  */
                  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
                  arm_insn_r->reg_rec_count = 1;
                }
              else if (1 == insn_op1)
                {
                  /* CSPR is going to be changed.  */
                  record_buf[0] = ARM_PS_REGNUM;
                  arm_insn_r->reg_rec_count = 1;
                }
              else if (3 == insn_op1)
                {
                  /* SPSR is going to be changed.  */
                  /* We need to get SPSR value, which is yet to be done.  */
                  return -1;
                }
            }
          else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
            {
              if (1 == insn_op1)
                {
                  /* BX.  */
                  record_buf[0] = ARM_PS_REGNUM;
                  arm_insn_r->reg_rec_count = 1;
                }
              else if (3 == insn_op1)
                {
                  /* CLZ.  */
                  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
                  arm_insn_r->reg_rec_count = 1;
                }
            }
          else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
            {
              /* BLX (register).  */
              record_buf[0] = ARM_PS_REGNUM;
              record_buf[1] = ARM_LR_REGNUM;
              arm_insn_r->reg_rec_count = 2;
            }
          else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
            {
              /* QADD, QSUB, QDADD, QDSUB */
              record_buf[0] = ARM_PS_REGNUM;
              record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
              arm_insn_r->reg_rec_count = 2;
            }
          else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
            {
              /* BKPT.  */
              record_buf[0] = ARM_PS_REGNUM;
              record_buf[1] = ARM_LR_REGNUM;
              arm_insn_r->reg_rec_count = 2;

              /* Save SPSR also; how?  */
            }
          else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
                   || 10 == bits (arm_insn_r->arm_insn, 4, 7)
                   || 12 == bits (arm_insn_r->arm_insn, 4, 7)
                   || 14 == bits (arm_insn_r->arm_insn, 4, 7))
            {
              if (0 == insn_op1 || 1 == insn_op1)
                {
                  /* SMLA<x><y>, SMLAW<y>, SMULW<y>.  */
                  /* We dont do optimization for SMULW<y> where we
                     need only 'd' reg but provide 'd' and 'hi'.  */
                  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
                  record_buf[1] = ARM_PS_REGNUM;
                  arm_insn_r->reg_rec_count = 2;
                }
              else if (2 == insn_op1)
                {
                  /* SMLAL<x><y>.  */
                  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
                  record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
                  arm_insn_r->reg_rec_count = 2;
                }
              else if (3 == insn_op1)
                {
                  /* SMUL<x><y>.  */
                  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
                  arm_insn_r->reg_rec_count = 1;
                }
            }
        }
      else
        {
          /* MSR : immediate form.  */
          if (1 == insn_op1)
            {
              /* CSPR is going to be changed.  */
              record_buf[0] = ARM_PS_REGNUM;
              arm_insn_r->reg_rec_count = 1;
            }
          else if (3 == insn_op1)
            {
              /* SPSR is going to be changed.  */
              /* we need to get SPSR value, which is yet to be done  */
              return -1;
            }
        }
    }

  opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
  insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);

  /* Handle load/store insn extension space.  */

  if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
      && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
      && !INSN_RECORDED(arm_insn_r))
    {
      if (!insn_op1)
        {
          /* These insn, changes register and memory as well.  */
          /* SWP or SWPB insn.  */
          /* Get memory address given by Rn.  */
          reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
          regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
          /* SWP insn ?, swaps word.  */
          if (8 == arm_insn_r->opcode)
            {
              record_buf_mem[0] = 4;
            }
          else
            {
              /* SWPB insn, swaps only byte.  */
              record_buf_mem[0] = 1;
            }
          record_buf_mem[1] = u_regval;
          arm_insn_r->mem_rec_count = 1;
          record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
          arm_insn_r->reg_rec_count = 1;
        }
      else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
        {
          /* STRH.  */
          arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
                          ARM_RECORD_STRH);
        }
      else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
        {
          /* LDRD.  */
          record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
          record_buf[1] = record_buf[0] + 1;
          arm_insn_r->reg_rec_count = 2;
        }
      else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
        {
          /* STRD.  */
          arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
                          ARM_RECORD_STRD);
        }
      else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
        {
          /* LDRH, LDRSB, LDRSH.  */
          record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
          arm_insn_r->reg_rec_count = 1;
        }
    }

  opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
  if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
      && !INSN_RECORDED(arm_insn_r))
    {
      ret = -1;
      /* Handle coprocessor insn extension space.  */
    }

  /* To be done for ARMv5 and later; as of now we return -1.  */
  if (-1 == ret)
    return ret;

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);

  return ret;
}
/* Handling opcode 000 insns.  */

static int
arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
{
  struct regcache *reg_cache = arm_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];
  ULONGEST u_regval[2] = {0};

  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0;

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
  opcode1 = bits (arm_insn_r->arm_insn, 20, 24);

  if (!((opcode1 & 0x19) == 0x10))
    {
      /* Data-processing (register) and data-processing (register-shifted
	 register) instructions.  */
      /* In all of the shifter-operand modes the insn modifies the
	 destination register, which is encoded in bits 12-15.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[1] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 2;
    }
  else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
    {
      /* Miscellaneous instructions.  */

      if (3 == arm_insn_r->decode && 0x12 == opcode1
	  && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
	{
	  /* Handle BLX, branch and link/exchange.  */
	  if (9 == arm_insn_r->opcode)
	    {
	      /* Branch is chosen by setting the T bit of CPSR from bit 0
		 of Rm, and R14 stores the return address.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;
	    }
	}
      else if (7 == arm_insn_r->decode && 0x12 == opcode1)
	{
	  /* Handle enhanced software breakpoint insn, BKPT.  */
	  /* CPSR is changed to execute in ARM state, disabling normal
	     interrupts and entering abort mode.  */
	  /* According to the high vector configuration PC is set.  */
	  /* If the user hits the breakpoint and then reverses, we need to
	     go back with the previous CPSR and Program Counter.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  arm_insn_r->reg_rec_count = 2;

	  /* Save SPSR also; how?  */
	  return -1;
	}
      else if (1 == arm_insn_r->decode && 0x12 == opcode1
	       && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
	{
	  /* Handle BX, branch and exchange.  */
	  /* Branch is chosen by setting the T bit of CPSR from bit 0 of Rm.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (1 == arm_insn_r->decode && 0x16 == opcode1
	       && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
	       && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
	{
	  /* Count leading zeros: CLZ.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
	       && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
	       && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
	       && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
	{
	  /* Handle MRS insn.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
    }
  else if (9 == arm_insn_r->decode && opcode1 < 0x10)
    {
      /* Multiply and multiply-accumulate.  */

      /* Handle multiply instructions.  */
      /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL.  */
      if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
	{
	  /* Handle MLA and MUL.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
	{
	  /* Handle SMLAL, SMULL, UMLAL, UMULL.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[2] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 3;
	}
    }
10319 else if (9 == arm_insn_r
->decode
&& opcode1
> 0x10)
10321 /* Synchronization primitives */
10323 /* Handling SWP, SWPB. */
10324 /* These insn, changes register and memory as well. */
10325 /* SWP or SWPB insn. */
10327 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10328 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10329 /* SWP insn ?, swaps word. */
10330 if (8 == arm_insn_r
->opcode
)
10332 record_buf_mem
[0] = 4;
10336 /* SWPB insn, swaps only byte. */
10337 record_buf_mem
[0] = 1;
10339 record_buf_mem
[1] = u_regval
[0];
10340 arm_insn_r
->mem_rec_count
= 1;
10341 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10342 arm_insn_r
->reg_rec_count
= 1;
10344 else if (11 == arm_insn_r
->decode
|| 13 == arm_insn_r
->decode
10345 || 15 == arm_insn_r
->decode
)
10347 if ((opcode1
& 0x12) == 2)
10349 /* Extra load/store (unprivileged) */
10354 /* Extra load/store */
10355 switch (bits (arm_insn_r
->arm_insn
, 5, 6))
10358 if ((opcode1
& 0x05) == 0x0 || (opcode1
& 0x05) == 0x4)
10360 /* STRH (register), STRH (immediate) */
10361 arm_record_strx (arm_insn_r
, &record_buf
[0],
10362 &record_buf_mem
[0], ARM_RECORD_STRH
);
10364 else if ((opcode1
& 0x05) == 0x1)
10366 /* LDRH (register) */
10367 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10368 arm_insn_r
->reg_rec_count
= 1;
10370 if (bit (arm_insn_r
->arm_insn
, 21))
10372 /* Write back to Rn. */
10373 record_buf
[arm_insn_r
->reg_rec_count
++]
10374 = bits (arm_insn_r
->arm_insn
, 16, 19);
10377 else if ((opcode1
& 0x05) == 0x5)
10379 /* LDRH (immediate), LDRH (literal) */
10380 int rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
10382 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10383 arm_insn_r
->reg_rec_count
= 1;
10387 /*LDRH (immediate) */
10388 if (bit (arm_insn_r
->arm_insn
, 21))
10390 /* Write back to Rn. */
10391 record_buf
[arm_insn_r
->reg_rec_count
++] = rn
;
10399 if ((opcode1
& 0x05) == 0x0)
10401 /* LDRD (register) */
10402 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10403 record_buf
[1] = record_buf
[0] + 1;
10404 arm_insn_r
->reg_rec_count
= 2;
10406 if (bit (arm_insn_r
->arm_insn
, 21))
10408 /* Write back to Rn. */
10409 record_buf
[arm_insn_r
->reg_rec_count
++]
10410 = bits (arm_insn_r
->arm_insn
, 16, 19);
10413 else if ((opcode1
& 0x05) == 0x1)
10415 /* LDRSB (register) */
10416 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10417 arm_insn_r
->reg_rec_count
= 1;
10419 if (bit (arm_insn_r
->arm_insn
, 21))
10421 /* Write back to Rn. */
10422 record_buf
[arm_insn_r
->reg_rec_count
++]
10423 = bits (arm_insn_r
->arm_insn
, 16, 19);
10426 else if ((opcode1
& 0x05) == 0x4 || (opcode1
& 0x05) == 0x5)
10428 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10430 int rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
10432 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10433 arm_insn_r
->reg_rec_count
= 1;
10437 /*LDRD (immediate), LDRSB (immediate) */
10438 if (bit (arm_insn_r
->arm_insn
, 21))
10440 /* Write back to Rn. */
10441 record_buf
[arm_insn_r
->reg_rec_count
++] = rn
;
10449 if ((opcode1
& 0x05) == 0x0)
10451 /* STRD (register) */
10452 arm_record_strx (arm_insn_r
, &record_buf
[0],
10453 &record_buf_mem
[0], ARM_RECORD_STRD
);
10455 else if ((opcode1
& 0x05) == 0x1)
10457 /* LDRSH (register) */
10458 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10459 arm_insn_r
->reg_rec_count
= 1;
10461 if (bit (arm_insn_r
->arm_insn
, 21))
10463 /* Write back to Rn. */
10464 record_buf
[arm_insn_r
->reg_rec_count
++]
10465 = bits (arm_insn_r
->arm_insn
, 16, 19);
10468 else if ((opcode1
& 0x05) == 0x4)
10470 /* STRD (immediate) */
10471 arm_record_strx (arm_insn_r
, &record_buf
[0],
10472 &record_buf_mem
[0], ARM_RECORD_STRD
);
10474 else if ((opcode1
& 0x05) == 0x5)
10476 /* LDRSH (immediate), LDRSH (literal) */
10477 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10478 arm_insn_r
->reg_rec_count
= 1;
10480 if (bit (arm_insn_r
->arm_insn
, 21))
10482 /* Write back to Rn. */
10483 record_buf
[arm_insn_r
->reg_rec_count
++]
10484 = bits (arm_insn_r
->arm_insn
, 16, 19);
  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);

  return 0;
}
/* Handling opcode 001 insns.  */

static int
arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
{
  uint32_t record_buf[8], record_buf_mem[8];

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);

  if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
      && 2 == bits (arm_insn_r->arm_insn, 20, 21)
      && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1))
    {
      /* Handle MSR insn.  */
      if (9 == arm_insn_r->opcode)
	{
	  /* CPSR is going to be changed.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* SPSR is going to be changed.  */
	  /* Reading the SPSR value is not supported yet.  */
	  return -1;
	}
    }
  else if (arm_insn_r->opcode <= 15)
    {
      /* Normal data processing insns.  */
      /* In all of the shifter-operand modes the insn modifies the
	 destination register, which is encoded in bits 12-15.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[1] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 2;
    }
  else
    {
      return -1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);

  return 0;
}
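
/* Data-processing (immediate) insns other than MSR always write a result
   to the Rd field in bits 12-15 and may update the condition flags, which
   is why the handler above records both that register and the CPSR.  */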
10552 arm_record_media (insn_decode_record
*arm_insn_r
)
10554 uint32_t record_buf
[8];
10556 switch (bits (arm_insn_r
->arm_insn
, 22, 24))
10559 /* Parallel addition and subtraction, signed */
10561 /* Parallel addition and subtraction, unsigned */
10564 /* Packing, unpacking, saturation and reversal */
10566 int rd
= bits (arm_insn_r
->arm_insn
, 12, 15);
10568 record_buf
[arm_insn_r
->reg_rec_count
++] = rd
;
10574 /* Signed multiplies */
10576 int rd
= bits (arm_insn_r
->arm_insn
, 16, 19);
10577 unsigned int op1
= bits (arm_insn_r
->arm_insn
, 20, 22);
10579 record_buf
[arm_insn_r
->reg_rec_count
++] = rd
;
10581 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
10582 else if (op1
== 0x4)
10583 record_buf
[arm_insn_r
->reg_rec_count
++]
10584 = bits (arm_insn_r
->arm_insn
, 12, 15);
10590 if (bit (arm_insn_r
->arm_insn
, 21)
10591 && bits (arm_insn_r
->arm_insn
, 5, 6) == 0x2)
10594 record_buf
[arm_insn_r
->reg_rec_count
++]
10595 = bits (arm_insn_r
->arm_insn
, 12, 15);
10597 else if (bits (arm_insn_r
->arm_insn
, 20, 21) == 0x0
10598 && bits (arm_insn_r
->arm_insn
, 5, 7) == 0x0)
10600 /* USAD8 and USADA8 */
10601 record_buf
[arm_insn_r
->reg_rec_count
++]
10602 = bits (arm_insn_r
->arm_insn
, 16, 19);
10609 if (bits (arm_insn_r
->arm_insn
, 20, 21) == 0x3
10610 && bits (arm_insn_r
->arm_insn
, 5, 7) == 0x7)
10612 /* Permanently UNDEFINED */
10617 /* BFC, BFI and UBFX */
10618 record_buf
[arm_insn_r
->reg_rec_count
++]
10619 = bits (arm_insn_r
->arm_insn
, 12, 15);
10628 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
/* Handle ARM mode instructions with opcode 010.  */

static int
arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
{
  struct regcache *reg_cache = arm_insn_r->regcache;

  uint32_t reg_base, reg_dest;
  uint32_t offset_12, tgt_mem_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  unsigned char wback;
  ULONGEST u_regval;

  /* Calculate wback.  */
  wback = (bit (arm_insn_r->arm_insn, 24) == 0)
	  || (bit (arm_insn_r->arm_insn, 21) == 1);

  arm_insn_r->reg_rec_count = 0;
  reg_base = bits (arm_insn_r->arm_insn, 16, 19);

  if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal),
	 LDRBT and LDRT.  */

      reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[arm_insn_r->reg_rec_count++] = reg_dest;

      /* The LDR instruction is capable of doing branching.  If MOV LR, PC
	 precedes a LDR instruction having R15 as reg_base, it emulates a
	 branch and link instruction, and hence we need to save CPSR and
	 PC as well.  */
      if (ARM_PC_REGNUM == reg_dest)
	record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;

      /* If wback is true, also save the base register, which is going to
	 be written back.  */
      if (wback)
	record_buf[arm_insn_r->reg_rec_count++] = reg_base;
    }
  else
    {
      /* STR (immediate), STRB (immediate), STRBT and STRT.  */

      offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
      regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);

      /* Handle bit U.  */
      if (bit (arm_insn_r->arm_insn, 23))
	{
	  /* U == 1: Add the offset.  */
	  tgt_mem_addr = (uint32_t) u_regval + offset_12;
	}
      else
	{
	  /* U == 0: subtract the offset.  */
	  tgt_mem_addr = (uint32_t) u_regval - offset_12;
	}

      /* Bit 22 tells us whether the store instruction writes 1 byte or 4
	 bytes.  */
      if (bit (arm_insn_r->arm_insn, 22))
	{
	  /* STRB and STRBT: 1 byte.  */
	  record_buf_mem[0] = 1;
	}
      else
	{
	  /* STR and STRT: 4 bytes.  */
	  record_buf_mem[0] = 4;
	}

      /* Handle bit P.  */
      if (bit (arm_insn_r->arm_insn, 24))
	record_buf_mem[1] = tgt_mem_addr;
      else
	record_buf_mem[1] = (uint32_t) u_regval;

      arm_insn_r->mem_rec_count = 1;

      /* If wback is true, also save the base register, which is going to
	 be written back.  */
      if (wback)
	record_buf[arm_insn_r->reg_rec_count++] = reg_base;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);

  return 0;
}
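
/* As an illustration of the logic above (hypothetical operands): for
   "str r1, [r2, #8]!" the handler reads r2, adds the 12-bit offset 8 to
   form the target address, records 4 bytes at that address so their old
   contents can be restored, and because writeback (bit 21) is set it also
   records r2 itself.  A load such as "ldr r3, [r2, #8]" instead records
   only the destination register r3.  */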
10724 /* Handling opcode 011 insns. */
10727 arm_record_ld_st_reg_offset (insn_decode_record
*arm_insn_r
)
10729 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10731 uint32_t shift_imm
= 0;
10732 uint32_t reg_src1
= 0, reg_src2
= 0, reg_dest
= 0;
10733 uint32_t offset_12
= 0, tgt_mem_addr
= 0;
10734 uint32_t record_buf
[8], record_buf_mem
[8];
10737 ULONGEST u_regval
[2];
10739 if (bit (arm_insn_r
->arm_insn
, 4))
10740 return arm_record_media (arm_insn_r
);
10742 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
10743 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
10745 /* Handle enhanced store insns and LDRD DSP insn,
10746 order begins according to addressing modes for store insns
10750 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
10752 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
10753 /* LDR insn has a capability to do branching, if
10754 MOV LR, PC is precedded by LDR insn having Rn as R15
10755 in that case, it emulates branch and link insn, and hence we
10756 need to save CSPR and PC as well. */
10757 if (15 != reg_dest
)
10759 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10760 arm_insn_r
->reg_rec_count
= 1;
10764 record_buf
[0] = reg_dest
;
10765 record_buf
[1] = ARM_PS_REGNUM
;
10766 arm_insn_r
->reg_rec_count
= 2;
10771 if (! bits (arm_insn_r
->arm_insn
, 4, 11))
10773 /* Store insn, register offset and register pre-indexed,
10774 register post-indexed. */
10776 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10778 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10779 regcache_raw_read_unsigned (reg_cache
, reg_src1
10781 regcache_raw_read_unsigned (reg_cache
, reg_src2
10783 if (15 == reg_src2
)
10785 /* If R15 was used as Rn, hence current PC+8. */
10786 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10787 u_regval
[0] = u_regval
[0] + 8;
10789 /* Calculate target store address, Rn +/- Rm, register offset. */
10791 if (bit (arm_insn_r
->arm_insn
, 23))
10793 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
10797 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
10800 switch (arm_insn_r
->opcode
)
10814 record_buf_mem
[0] = 4;
10829 record_buf_mem
[0] = 1;
10833 gdb_assert_not_reached ("no decoding pattern found");
10836 record_buf_mem
[1] = tgt_mem_addr
;
10837 arm_insn_r
->mem_rec_count
= 1;
10839 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
10840 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
10841 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
10842 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
10843 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
10844 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
10847 /* Rn is going to be changed in pre-indexed mode and
10848 post-indexed mode as well. */
10849 record_buf
[0] = reg_src2
;
10850 arm_insn_r
->reg_rec_count
= 1;
10855 /* Store insn, scaled register offset; scaled pre-indexed. */
10856 offset_12
= bits (arm_insn_r
->arm_insn
, 5, 6);
10858 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10860 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10861 /* Get shift_imm. */
10862 shift_imm
= bits (arm_insn_r
->arm_insn
, 7, 11);
10863 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10864 regcache_raw_read_signed (reg_cache
, reg_src1
, &s_word
);
10865 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10866 /* Offset_12 used as shift. */
10870 /* Offset_12 used as index. */
10871 offset_12
= u_regval
[0] << shift_imm
;
10875 offset_12
= (!shift_imm
)?0:u_regval
[0] >> shift_imm
;
10881 if (bit (u_regval
[0], 31))
10883 offset_12
= 0xFFFFFFFF;
10892 /* This is arithmetic shift. */
10893 offset_12
= s_word
>> shift_imm
;
10900 regcache_raw_read_unsigned (reg_cache
, ARM_PS_REGNUM
,
10902 /* Get C flag value and shift it by 31. */
10903 offset_12
= (((bit (u_regval
[1], 29)) << 31) \
10904 | (u_regval
[0]) >> 1);
10908 offset_12
= (u_regval
[0] >> shift_imm
) \
10910 (sizeof(uint32_t) - shift_imm
));
10915 gdb_assert_not_reached ("no decoding pattern found");
10919 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10921 if (bit (arm_insn_r
->arm_insn
, 23))
10923 tgt_mem_addr
= u_regval
[1] + offset_12
;
10927 tgt_mem_addr
= u_regval
[1] - offset_12
;
10930 switch (arm_insn_r
->opcode
)
10944 record_buf_mem
[0] = 4;
10959 record_buf_mem
[0] = 1;
10963 gdb_assert_not_reached ("no decoding pattern found");
10966 record_buf_mem
[1] = tgt_mem_addr
;
10967 arm_insn_r
->mem_rec_count
= 1;
10969 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
10970 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
10971 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
10972 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
10973 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
10974 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
10977 /* Rn is going to be changed in register scaled pre-indexed
10978 mode,and scaled post indexed mode. */
10979 record_buf
[0] = reg_src2
;
10980 arm_insn_r
->reg_rec_count
= 1;
10985 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10986 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
/* Handle ARM mode instructions with opcode 100.  */

static int
arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
{
  struct regcache *reg_cache = arm_insn_r->regcache;
  uint32_t register_count = 0, register_bits;
  uint32_t reg_base, addr_mode;
  uint32_t record_buf[24], record_buf_mem[48];
  uint32_t wback;
  ULONGEST u_regval;

  /* Fetch the list of registers.  */
  register_bits = bits (arm_insn_r->arm_insn, 0, 15);
  arm_insn_r->reg_rec_count = 0;

  /* Fetch the base register that contains the address we are loading data
     to.  */
  reg_base = bits (arm_insn_r->arm_insn, 16, 19);

  /* Calculate wback.  */
  wback = (bit (arm_insn_r->arm_insn, 21) == 1);

  if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB.  */

      /* Find out which registers are going to be loaded from memory.  */
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    record_buf[arm_insn_r->reg_rec_count++] = register_count;
	  register_bits = register_bits >> 1;
	  register_count++;
	}

      /* If wback is true, also save the base register, which is going to
	 be written back.  */
      if (wback)
	record_buf[arm_insn_r->reg_rec_count++] = reg_base;

      /* Save the CPSR register.  */
      record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
    }
  else
    {
      /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA).  */

      addr_mode = bits (arm_insn_r->arm_insn, 23, 24);

      regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);

      /* Find out how many registers are going to be stored to memory.  */
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    register_count++;
	  register_bits = register_bits >> 1;
	}

      switch (addr_mode)
	{
	  /* STMDA (STMED): Decrement after.  */
	case 0:
	  record_buf_mem[1] = (uint32_t) u_regval
			      - register_count * INT_REGISTER_SIZE + 4;
	  break;
	  /* STM (STMIA, STMEA): Increment after.  */
	case 1:
	  record_buf_mem[1] = (uint32_t) u_regval;
	  break;
	  /* STMDB (STMFD): Decrement before.  */
	case 2:
	  record_buf_mem[1] = (uint32_t) u_regval
			      - register_count * INT_REGISTER_SIZE;
	  break;
	  /* STMIB (STMFA): Increment before.  */
	case 3:
	  record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
	  break;
	default:
	  gdb_assert_not_reached ("no decoding pattern found");
	  break;
	}

      record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
      arm_insn_r->mem_rec_count = 1;

      /* If wback is true, also save the base register, which is going to
	 be written back.  */
      if (wback)
	record_buf[arm_insn_r->reg_rec_count++] = reg_base;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);

  return 0;
}
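
/* Example (hypothetical operands): for "stmdb r0!, {r1-r4}" the handler
   counts four registers, so it records one 16-byte block starting at
   r0 - 16 (decrement-before addressing) and, because writeback is set,
   the base register r0 as well.  */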
/* Handling opcode 101 insns.  */

static int
arm_record_b_bl (insn_decode_record *arm_insn_r)
{
  uint32_t record_buf[8];

  /* Handle B, BL, BLX(1) insns.  */
  /* B simply branches so we do nothing here.  */
  /* Note: BLX(1) doesn't fall here but instead it falls into
     extension space.  */
  if (bit (arm_insn_r->arm_insn, 24))
    {
      record_buf[0] = ARM_LR_REGNUM;
      arm_insn_r->reg_rec_count = 1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);

  return 0;
}
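
/* The PC itself never needs to be recorded by these handlers: the generic
   process-record code saves it for every instruction, so a plain B is a
   no-op for the recorder while BL additionally clobbers LR.  */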
static int
arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
{
  printf_unfiltered (_("Process record does not support instruction "
		       "0x%0x at address %s.\n"), arm_insn_r->arm_insn,
		     paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));

  return -1;
}
11122 /* Record handler for vector data transfer instructions. */
11125 arm_record_vdata_transfer_insn (insn_decode_record
*arm_insn_r
)
11127 uint32_t bits_a
, bit_c
, bit_l
, reg_t
, reg_v
;
11128 uint32_t record_buf
[4];
11130 reg_t
= bits (arm_insn_r
->arm_insn
, 12, 15);
11131 reg_v
= bits (arm_insn_r
->arm_insn
, 21, 23);
11132 bits_a
= bits (arm_insn_r
->arm_insn
, 21, 23);
11133 bit_l
= bit (arm_insn_r
->arm_insn
, 20);
11134 bit_c
= bit (arm_insn_r
->arm_insn
, 8);
11136 /* Handle VMOV instruction. */
11137 if (bit_l
&& bit_c
)
11139 record_buf
[0] = reg_t
;
11140 arm_insn_r
->reg_rec_count
= 1;
11142 else if (bit_l
&& !bit_c
)
11144 /* Handle VMOV instruction. */
11145 if (bits_a
== 0x00)
11147 record_buf
[0] = reg_t
;
11148 arm_insn_r
->reg_rec_count
= 1;
11150 /* Handle VMRS instruction. */
11151 else if (bits_a
== 0x07)
11154 reg_t
= ARM_PS_REGNUM
;
11156 record_buf
[0] = reg_t
;
11157 arm_insn_r
->reg_rec_count
= 1;
11160 else if (!bit_l
&& !bit_c
)
11162 /* Handle VMOV instruction. */
11163 if (bits_a
== 0x00)
11165 record_buf
[0] = ARM_D0_REGNUM
+ reg_v
;
11167 arm_insn_r
->reg_rec_count
= 1;
11169 /* Handle VMSR instruction. */
11170 else if (bits_a
== 0x07)
11172 record_buf
[0] = ARM_FPSCR_REGNUM
;
11173 arm_insn_r
->reg_rec_count
= 1;
11176 else if (!bit_l
&& bit_c
)
11178 /* Handle VMOV instruction. */
11179 if (!(bits_a
& 0x04))
11181 record_buf
[0] = (reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4))
11183 arm_insn_r
->reg_rec_count
= 1;
11185 /* Handle VDUP instruction. */
11188 if (bit (arm_insn_r
->arm_insn
, 21))
11190 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
11191 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
11192 record_buf
[1] = reg_v
+ ARM_D0_REGNUM
+ 1;
11193 arm_insn_r
->reg_rec_count
= 2;
11197 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
11198 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
11199 arm_insn_r
->reg_rec_count
= 1;
11204 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11208 /* Record handler for extension register load/store instructions. */
11211 arm_record_exreg_ld_st_insn (insn_decode_record
*arm_insn_r
)
11213 uint32_t opcode
, single_reg
;
11214 uint8_t op_vldm_vstm
;
11215 uint32_t record_buf
[8], record_buf_mem
[128];
11216 ULONGEST u_regval
= 0;
11218 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11220 opcode
= bits (arm_insn_r
->arm_insn
, 20, 24);
11221 single_reg
= !bit (arm_insn_r
->arm_insn
, 8);
11222 op_vldm_vstm
= opcode
& 0x1b;
11224 /* Handle VMOV instructions. */
11225 if ((opcode
& 0x1e) == 0x04)
11227 if (bit (arm_insn_r
->arm_insn
, 20)) /* to_arm_registers bit 20? */
11229 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11230 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
11231 arm_insn_r
->reg_rec_count
= 2;
11235 uint8_t reg_m
= bits (arm_insn_r
->arm_insn
, 0, 3);
11236 uint8_t bit_m
= bit (arm_insn_r
->arm_insn
, 5);
11240 /* The first S register number m is REG_M:M (M is bit 5),
11241 the corresponding D register number is REG_M:M / 2, which
11243 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_D0_REGNUM
+ reg_m
;
11244 /* The second S register number is REG_M:M + 1, the
11245 corresponding D register number is (REG_M:M + 1) / 2.
11246 IOW, if bit M is 1, the first and second S registers
11247 are mapped to different D registers, otherwise, they are
11248 in the same D register. */
11251 record_buf
[arm_insn_r
->reg_rec_count
++]
11252 = ARM_D0_REGNUM
+ reg_m
+ 1;
11257 record_buf
[0] = ((bit_m
<< 4) + reg_m
+ ARM_D0_REGNUM
);
11258 arm_insn_r
->reg_rec_count
= 1;
11262 /* Handle VSTM and VPUSH instructions. */
11263 else if (op_vldm_vstm
== 0x08 || op_vldm_vstm
== 0x0a
11264 || op_vldm_vstm
== 0x12)
11266 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
, memory_count
;
11267 uint32_t memory_index
= 0;
11269 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
11270 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
11271 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
11272 imm_off32
= imm_off8
<< 2;
11273 memory_count
= imm_off8
;
11275 if (bit (arm_insn_r
->arm_insn
, 23))
11276 start_address
= u_regval
;
11278 start_address
= u_regval
- imm_off32
;
11280 if (bit (arm_insn_r
->arm_insn
, 21))
11282 record_buf
[0] = reg_rn
;
11283 arm_insn_r
->reg_rec_count
= 1;
11286 while (memory_count
> 0)
11290 record_buf_mem
[memory_index
] = 4;
11291 record_buf_mem
[memory_index
+ 1] = start_address
;
11292 start_address
= start_address
+ 4;
11293 memory_index
= memory_index
+ 2;
11297 record_buf_mem
[memory_index
] = 4;
11298 record_buf_mem
[memory_index
+ 1] = start_address
;
11299 record_buf_mem
[memory_index
+ 2] = 4;
11300 record_buf_mem
[memory_index
+ 3] = start_address
+ 4;
11301 start_address
= start_address
+ 8;
11302 memory_index
= memory_index
+ 4;
11306 arm_insn_r
->mem_rec_count
= (memory_index
>> 1);
11308 /* Handle VLDM instructions. */
11309 else if (op_vldm_vstm
== 0x09 || op_vldm_vstm
== 0x0b
11310 || op_vldm_vstm
== 0x13)
11312 uint32_t reg_count
, reg_vd
;
11313 uint32_t reg_index
= 0;
11314 uint32_t bit_d
= bit (arm_insn_r
->arm_insn
, 22);
11316 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
11317 reg_count
= bits (arm_insn_r
->arm_insn
, 0, 7);
11319 /* REG_VD is the first D register number. If the instruction
11320 loads memory to S registers (SINGLE_REG is TRUE), the register
11321 number is (REG_VD << 1 | bit D), so the corresponding D
11322 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11324 reg_vd
= reg_vd
| (bit_d
<< 4);
11326 if (bit (arm_insn_r
->arm_insn
, 21) /* write back */)
11327 record_buf
[reg_index
++] = bits (arm_insn_r
->arm_insn
, 16, 19);
11329 /* If the instruction loads memory to D register, REG_COUNT should
11330 be divided by 2, according to the ARM Architecture Reference
11331 Manual. If the instruction loads memory to S register, divide by
11332 2 as well because two S registers are mapped to D register. */
11333 reg_count
= reg_count
/ 2;
11334 if (single_reg
&& bit_d
)
11336 /* Increase the register count if S register list starts from
11337 an odd number (bit d is one). */
11341 while (reg_count
> 0)
11343 record_buf
[reg_index
++] = ARM_D0_REGNUM
+ reg_vd
+ reg_count
- 1;
11346 arm_insn_r
->reg_rec_count
= reg_index
;
11348 /* VSTR Vector store register. */
11349 else if ((opcode
& 0x13) == 0x10)
11351 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
;
11352 uint32_t memory_index
= 0;
11354 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
11355 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
11356 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
11357 imm_off32
= imm_off8
<< 2;
11359 if (bit (arm_insn_r
->arm_insn
, 23))
11360 start_address
= u_regval
+ imm_off32
;
11362 start_address
= u_regval
- imm_off32
;
11366 record_buf_mem
[memory_index
] = 4;
11367 record_buf_mem
[memory_index
+ 1] = start_address
;
11368 arm_insn_r
->mem_rec_count
= 1;
11372 record_buf_mem
[memory_index
] = 4;
11373 record_buf_mem
[memory_index
+ 1] = start_address
;
11374 record_buf_mem
[memory_index
+ 2] = 4;
11375 record_buf_mem
[memory_index
+ 3] = start_address
+ 4;
11376 arm_insn_r
->mem_rec_count
= 2;
11379 /* VLDR Vector load register. */
11380 else if ((opcode
& 0x13) == 0x11)
11382 uint32_t reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
11386 reg_vd
= reg_vd
| (bit (arm_insn_r
->arm_insn
, 22) << 4);
11387 record_buf
[0] = ARM_D0_REGNUM
+ reg_vd
;
11391 reg_vd
= (reg_vd
<< 1) | bit (arm_insn_r
->arm_insn
, 22);
11392 /* Record register D rather than pseudo register S. */
11393 record_buf
[0] = ARM_D0_REGNUM
+ reg_vd
/ 2;
11395 arm_insn_r
->reg_rec_count
= 1;
11398 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11399 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11403 /* Record handler for arm/thumb mode VFP data processing instructions. */
11406 arm_record_vfp_data_proc_insn (insn_decode_record
*arm_insn_r
)
11408 uint32_t opc1
, opc2
, opc3
, dp_op_sz
, bit_d
, reg_vd
;
11409 uint32_t record_buf
[4];
11410 enum insn_types
{INSN_T0
, INSN_T1
, INSN_T2
, INSN_T3
, INSN_INV
};
11411 enum insn_types curr_insn_type
= INSN_INV
;
11413 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
11414 opc1
= bits (arm_insn_r
->arm_insn
, 20, 23);
11415 opc2
= bits (arm_insn_r
->arm_insn
, 16, 19);
11416 opc3
= bits (arm_insn_r
->arm_insn
, 6, 7);
11417 dp_op_sz
= bit (arm_insn_r
->arm_insn
, 8);
11418 bit_d
= bit (arm_insn_r
->arm_insn
, 22);
11419 /* Mask off the "D" bit. */
11420 opc1
= opc1
& ~0x04;
11422 /* Handle VMLA, VMLS. */
11425 if (bit (arm_insn_r
->arm_insn
, 10))
11427 if (bit (arm_insn_r
->arm_insn
, 6))
11428 curr_insn_type
= INSN_T0
;
11430 curr_insn_type
= INSN_T1
;
11435 curr_insn_type
= INSN_T1
;
11437 curr_insn_type
= INSN_T2
;
11440 /* Handle VNMLA, VNMLS, VNMUL. */
11441 else if (opc1
== 0x01)
11444 curr_insn_type
= INSN_T1
;
11446 curr_insn_type
= INSN_T2
;
11449 else if (opc1
== 0x02 && !(opc3
& 0x01))
11451 if (bit (arm_insn_r
->arm_insn
, 10))
11453 if (bit (arm_insn_r
->arm_insn
, 6))
11454 curr_insn_type
= INSN_T0
;
11456 curr_insn_type
= INSN_T1
;
11461 curr_insn_type
= INSN_T1
;
11463 curr_insn_type
= INSN_T2
;
11466 /* Handle VADD, VSUB. */
11467 else if (opc1
== 0x03)
11469 if (!bit (arm_insn_r
->arm_insn
, 9))
11471 if (bit (arm_insn_r
->arm_insn
, 6))
11472 curr_insn_type
= INSN_T0
;
11474 curr_insn_type
= INSN_T1
;
11479 curr_insn_type
= INSN_T1
;
11481 curr_insn_type
= INSN_T2
;
11485 else if (opc1
== 0x08)
11488 curr_insn_type
= INSN_T1
;
11490 curr_insn_type
= INSN_T2
;
11492 /* Handle all other vfp data processing instructions. */
11493 else if (opc1
== 0x0b)
11496 if (!(opc3
& 0x01) || (opc2
== 0x00 && opc3
== 0x01))
11498 if (bit (arm_insn_r
->arm_insn
, 4))
11500 if (bit (arm_insn_r
->arm_insn
, 6))
11501 curr_insn_type
= INSN_T0
;
11503 curr_insn_type
= INSN_T1
;
11508 curr_insn_type
= INSN_T1
;
11510 curr_insn_type
= INSN_T2
;
11513 /* Handle VNEG and VABS. */
11514 else if ((opc2
== 0x01 && opc3
== 0x01)
11515 || (opc2
== 0x00 && opc3
== 0x03))
11517 if (!bit (arm_insn_r
->arm_insn
, 11))
11519 if (bit (arm_insn_r
->arm_insn
, 6))
11520 curr_insn_type
= INSN_T0
;
11522 curr_insn_type
= INSN_T1
;
11527 curr_insn_type
= INSN_T1
;
11529 curr_insn_type
= INSN_T2
;
11532 /* Handle VSQRT. */
11533 else if (opc2
== 0x01 && opc3
== 0x03)
11536 curr_insn_type
= INSN_T1
;
11538 curr_insn_type
= INSN_T2
;
11541 else if (opc2
== 0x07 && opc3
== 0x03)
11544 curr_insn_type
= INSN_T1
;
11546 curr_insn_type
= INSN_T2
;
11548 else if (opc3
& 0x01)
11551 if ((opc2
== 0x08) || (opc2
& 0x0e) == 0x0c)
11553 if (!bit (arm_insn_r
->arm_insn
, 18))
11554 curr_insn_type
= INSN_T2
;
11558 curr_insn_type
= INSN_T1
;
11560 curr_insn_type
= INSN_T2
;
11564 else if ((opc2
& 0x0e) == 0x0a || (opc2
& 0x0e) == 0x0e)
11567 curr_insn_type
= INSN_T1
;
11569 curr_insn_type
= INSN_T2
;
11571 /* Handle VCVTB, VCVTT. */
11572 else if ((opc2
& 0x0e) == 0x02)
11573 curr_insn_type
= INSN_T2
;
11574 /* Handle VCMP, VCMPE. */
11575 else if ((opc2
& 0x0e) == 0x04)
11576 curr_insn_type
= INSN_T3
;
11580 switch (curr_insn_type
)
11583 reg_vd
= reg_vd
| (bit_d
<< 4);
11584 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
11585 record_buf
[1] = reg_vd
+ ARM_D0_REGNUM
+ 1;
11586 arm_insn_r
->reg_rec_count
= 2;
11590 reg_vd
= reg_vd
| (bit_d
<< 4);
11591 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
11592 arm_insn_r
->reg_rec_count
= 1;
11596 reg_vd
= (reg_vd
<< 1) | bit_d
;
11597 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
11598 arm_insn_r
->reg_rec_count
= 1;
11602 record_buf
[0] = ARM_FPSCR_REGNUM
;
11603 arm_insn_r
->reg_rec_count
= 1;
11607 gdb_assert_not_reached ("no decoding pattern found");
11611 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11615 /* Handling opcode 110 insns. */
11618 arm_record_asimd_vfp_coproc (insn_decode_record
*arm_insn_r
)
11620 uint32_t op1
, op1_ebit
, coproc
;
11622 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
11623 op1
= bits (arm_insn_r
->arm_insn
, 20, 25);
11624 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
11626 if ((coproc
& 0x0e) == 0x0a)
11628 /* Handle extension register ld/st instructions. */
11630 return arm_record_exreg_ld_st_insn (arm_insn_r
);
11632 /* 64-bit transfers between arm core and extension registers. */
11633 if ((op1
& 0x3e) == 0x04)
11634 return arm_record_exreg_ld_st_insn (arm_insn_r
);
11638 /* Handle coprocessor ld/st instructions. */
11643 return arm_record_unsupported_insn (arm_insn_r
);
11646 return arm_record_unsupported_insn (arm_insn_r
);
11649 /* Move to coprocessor from two arm core registers. */
11651 return arm_record_unsupported_insn (arm_insn_r
);
11653 /* Move to two arm core registers from coprocessor. */
11658 reg_t
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11659 reg_t
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
11660 arm_insn_r
->reg_rec_count
= 2;
11662 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, reg_t
);
11666 return arm_record_unsupported_insn (arm_insn_r
);
11669 /* Handling opcode 111 insns. */
11672 arm_record_coproc_data_proc (insn_decode_record
*arm_insn_r
)
11674 uint32_t op
, op1_ebit
, coproc
, bits_24_25
;
11675 struct gdbarch_tdep
*tdep
= gdbarch_tdep (arm_insn_r
->gdbarch
);
11676 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11678 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 24, 27);
11679 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
11680 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
11681 op
= bit (arm_insn_r
->arm_insn
, 4);
11682 bits_24_25
= bits (arm_insn_r
->arm_insn
, 24, 25);
11684 /* Handle arm SWI/SVC system call instructions. */
11685 if (bits_24_25
== 0x3)
11687 if (tdep
->arm_syscall_record
!= NULL
)
11689 ULONGEST svc_operand
, svc_number
;
11691 svc_operand
= (0x00ffffff & arm_insn_r
->arm_insn
);
11693 if (svc_operand
) /* OABI. */
11694 svc_number
= svc_operand
- 0x900000;
11696 regcache_raw_read_unsigned (reg_cache
, 7, &svc_number
);
11698 return tdep
->arm_syscall_record (reg_cache
, svc_number
);
11702 printf_unfiltered (_("no syscall record support\n"));
11706 else if (bits_24_25
== 0x02)
11710 if ((coproc
& 0x0e) == 0x0a)
11712 /* 8, 16, and 32-bit transfer */
11713 return arm_record_vdata_transfer_insn (arm_insn_r
);
11720 uint32_t record_buf
[1];
11722 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11723 if (record_buf
[0] == 15)
11724 record_buf
[0] = ARM_PS_REGNUM
;
11726 arm_insn_r
->reg_rec_count
= 1;
11727 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
,
11740 if ((coproc
& 0x0e) == 0x0a)
11742 /* VFP data-processing instructions. */
11743 return arm_record_vfp_data_proc_insn (arm_insn_r
);
11754 unsigned int op1
= bits (arm_insn_r
->arm_insn
, 20, 25);
11758 if ((coproc
& 0x0e) != 0x0a)
11764 else if (op1
== 4 || op1
== 5)
11766 if ((coproc
& 0x0e) == 0x0a)
11768 /* 64-bit transfers between ARM core and extension */
11777 else if (op1
== 0 || op1
== 1)
11784 if ((coproc
& 0x0e) == 0x0a)
11786 /* Extension register load/store */
11790 /* STC, STC2, LDC, LDC2 */
/* Handling opcode 000 insns.  */

static int
thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
{
  uint32_t record_buf[8];
  uint32_t reg_src1 = 0;

  reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);

  record_buf[0] = ARM_PS_REGNUM;
  record_buf[1] = reg_src1;
  thumb_insn_r->reg_rec_count = 2;

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);

  return 0;
}
/* Handling opcode 001 insns.  */

static int
thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
{
  uint32_t record_buf[8];
  uint32_t reg_src1 = 0;

  reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);

  record_buf[0] = ARM_PS_REGNUM;
  record_buf[1] = reg_src1;
  thumb_insn_r->reg_rec_count = 2;

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);

  return 0;
}
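
/* 16-bit Thumb arithmetic insns encode their destination in a 3-bit field,
   so only r0-r7 can be modified here; the handlers record that low
   register plus the CPSR for the flag update.  */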
11838 /* Handling opcode 010 insns. */
11841 thumb_record_ld_st_reg_offset (insn_decode_record
*thumb_insn_r
)
11843 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
11844 uint32_t record_buf
[8], record_buf_mem
[8];
11846 uint32_t reg_src1
= 0, reg_src2
= 0;
11847 uint32_t opcode1
= 0, opcode2
= 0, opcode3
= 0;
11849 ULONGEST u_regval
[2] = {0};
11851 opcode1
= bits (thumb_insn_r
->arm_insn
, 10, 12);
11853 if (bit (thumb_insn_r
->arm_insn
, 12))
11855 /* Handle load/store register offset. */
11856 uint32_t opB
= bits (thumb_insn_r
->arm_insn
, 9, 11);
11858 if (in_inclusive_range (opB
, 4U, 7U))
11860 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11861 reg_src1
= bits (thumb_insn_r
->arm_insn
,0, 2);
11862 record_buf
[0] = reg_src1
;
11863 thumb_insn_r
->reg_rec_count
= 1;
11865 else if (in_inclusive_range (opB
, 0U, 2U))
11867 /* STR(2), STRB(2), STRH(2) . */
11868 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
11869 reg_src2
= bits (thumb_insn_r
->arm_insn
, 6, 8);
11870 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
11871 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
11873 record_buf_mem
[0] = 4; /* STR (2). */
11875 record_buf_mem
[0] = 1; /* STRB (2). */
11877 record_buf_mem
[0] = 2; /* STRH (2). */
11878 record_buf_mem
[1] = u_regval
[0] + u_regval
[1];
11879 thumb_insn_r
->mem_rec_count
= 1;
11882 else if (bit (thumb_insn_r
->arm_insn
, 11))
11884 /* Handle load from literal pool. */
11886 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
11887 record_buf
[0] = reg_src1
;
11888 thumb_insn_r
->reg_rec_count
= 1;
11892 /* Special data instructions and branch and exchange */
11893 opcode2
= bits (thumb_insn_r
->arm_insn
, 8, 9);
11894 opcode3
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11895 if ((3 == opcode2
) && (!opcode3
))
11897 /* Branch with exchange. */
11898 record_buf
[0] = ARM_PS_REGNUM
;
11899 thumb_insn_r
->reg_rec_count
= 1;
11903 /* Format 8; special data processing insns. */
11904 record_buf
[0] = ARM_PS_REGNUM
;
11905 record_buf
[1] = (bit (thumb_insn_r
->arm_insn
, 7) << 3
11906 | bits (thumb_insn_r
->arm_insn
, 0, 2));
11907 thumb_insn_r
->reg_rec_count
= 2;
11912 /* Format 5; data processing insns. */
11913 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11914 if (bit (thumb_insn_r
->arm_insn
, 7))
11916 reg_src1
= reg_src1
+ 8;
11918 record_buf
[0] = ARM_PS_REGNUM
;
11919 record_buf
[1] = reg_src1
;
11920 thumb_insn_r
->reg_rec_count
= 2;
11923 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11924 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
/* Handling opcode 011 insns.  */

static int
thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
{
  struct regcache *reg_cache = thumb_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];

  uint32_t reg_src1 = 0;
  uint32_t opcode = 0, immed_5 = 0;

  ULONGEST u_regval = 0;

  opcode = bits (thumb_insn_r->arm_insn, 11, 12);

  if (opcode & 1)
    {
      /* LDR-style loads: only the destination register changes.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
      record_buf[0] = reg_src1;
      thumb_insn_r->reg_rec_count = 1;
    }
  else
    {
      /* STR-style stores: the addressed memory changes.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
      immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
      record_buf_mem[0] = 4;
      record_buf_mem[1] = u_regval + (immed_5 * 4);
      thumb_insn_r->mem_rec_count = 1;
    }

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return 0;
}
/* Handling opcode 100 insns.  */

static int
thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
{
  struct regcache *reg_cache = thumb_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];

  uint32_t reg_src1 = 0;
  uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;

  ULONGEST u_regval = 0;

  opcode = bits (thumb_insn_r->arm_insn, 11, 12);

  if (3 == opcode)
    {
      /* LDR(4): SP-relative load; only the destination register changes.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      record_buf[0] = reg_src1;
      thumb_insn_r->reg_rec_count = 1;
    }
  else if (1 == opcode)
    {
      /* LDRH(1): halfword load with immediate offset.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
      record_buf[0] = reg_src1;
      thumb_insn_r->reg_rec_count = 1;
    }
  else if (2 == opcode)
    {
      /* STR(3): SP-relative store of one word.  */
      immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
      regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
      record_buf_mem[0] = 4;
      record_buf_mem[1] = u_regval + (immed_8 * 4);
      thumb_insn_r->mem_rec_count = 1;
    }
  else if (0 == opcode)
    {
      /* STRH(1): halfword store with immediate offset.  */
      immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
      reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
      record_buf_mem[0] = 2;
      record_buf_mem[1] = u_regval + (immed_5 * 2);
      thumb_insn_r->mem_rec_count = 1;
    }

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return 0;
}
12026 /* Handling opcode 101 insns. */
12029 thumb_record_misc (insn_decode_record
*thumb_insn_r
)
12031 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12033 uint32_t opcode
= 0;
12034 uint32_t register_bits
= 0, register_count
= 0;
12035 uint32_t index
= 0, start_address
= 0;
12036 uint32_t record_buf
[24], record_buf_mem
[48];
12039 ULONGEST u_regval
= 0;
12041 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12043 if (opcode
== 0 || opcode
== 1)
12045 /* ADR and ADD (SP plus immediate) */
12047 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12048 record_buf
[0] = reg_src1
;
12049 thumb_insn_r
->reg_rec_count
= 1;
12053 /* Miscellaneous 16-bit instructions */
12054 uint32_t opcode2
= bits (thumb_insn_r
->arm_insn
, 8, 11);
12059 /* SETEND and CPS */
12062 /* ADD/SUB (SP plus immediate) */
12063 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12064 record_buf
[0] = ARM_SP_REGNUM
;
12065 thumb_insn_r
->reg_rec_count
= 1;
12067 case 1: /* fall through */
12068 case 3: /* fall through */
12069 case 9: /* fall through */
12074 /* SXTH, SXTB, UXTH, UXTB */
12075 record_buf
[0] = bits (thumb_insn_r
->arm_insn
, 0, 2);
12076 thumb_insn_r
->reg_rec_count
= 1;
12078 case 4: /* fall through */
12081 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12082 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
12083 while (register_bits
)
12085 if (register_bits
& 0x00000001)
12087 register_bits
= register_bits
>> 1;
12089 start_address
= u_regval
- \
12090 (4 * (bit (thumb_insn_r
->arm_insn
, 8) + register_count
));
12091 thumb_insn_r
->mem_rec_count
= register_count
;
12092 while (register_count
)
12094 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12095 record_buf_mem
[(register_count
* 2) - 2] = 4;
12096 start_address
= start_address
+ 4;
12099 record_buf
[0] = ARM_SP_REGNUM
;
12100 thumb_insn_r
->reg_rec_count
= 1;
12103 /* REV, REV16, REVSH */
12104 record_buf
[0] = bits (thumb_insn_r
->arm_insn
, 0, 2);
12105 thumb_insn_r
->reg_rec_count
= 1;
12107 case 12: /* fall through */
12110 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12111 while (register_bits
)
12113 if (register_bits
& 0x00000001)
12114 record_buf
[index
++] = register_count
;
12115 register_bits
= register_bits
>> 1;
12118 record_buf
[index
++] = ARM_PS_REGNUM
;
12119 record_buf
[index
++] = ARM_SP_REGNUM
;
12120 thumb_insn_r
->reg_rec_count
= index
;
12124 /* Handle enhanced software breakpoint insn, BKPT. */
12125 /* CPSR is changed to be executed in ARM state, disabling normal
12126 interrupts, entering abort mode. */
12127 /* According to high vector configuration PC is set. */
12128 /* User hits breakpoint and type reverse, in that case, we need to go back with
12129 previous CPSR and Program Counter. */
12130 record_buf
[0] = ARM_PS_REGNUM
;
12131 record_buf
[1] = ARM_LR_REGNUM
;
12132 thumb_insn_r
->reg_rec_count
= 2;
12133 /* We need to save SPSR value, which is not yet done. */
12134 printf_unfiltered (_("Process record does not support instruction "
12135 "0x%0x at address %s.\n"),
12136 thumb_insn_r
->arm_insn
,
12137 paddress (thumb_insn_r
->gdbarch
,
12138 thumb_insn_r
->this_addr
));
12142 /* If-Then, and hints */
12149 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12150 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
/* Handling opcode 110 insns.  */

static int
thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
  struct regcache *reg_cache = thumb_insn_r->regcache;

  uint32_t ret = 0; /* Function return value: -1: record failure, 0: success.  */
  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
  opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);

  if (1 == opcode2)
    {
      /* LDMIA: all listed registers plus the base register change.  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    record_buf[index++] = register_count;
	  register_bits = register_bits >> 1;
	  register_count++;
	}
      record_buf[index++] = reg_src1;
      thumb_insn_r->reg_rec_count = index;
    }
  else if (0 == opcode2)
    {
      /* Handle STMIA.  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    register_count++;
	  register_bits = register_bits >> 1;
	}
      start_address = u_regval;
      thumb_insn_r->mem_rec_count = register_count;
      while (register_count)
	{
	  record_buf_mem[(register_count * 2) - 1] = start_address;
	  record_buf_mem[(register_count * 2) - 2] = 4;
	  start_address = start_address + 4;
	  register_count--;
	}
    }
  else if (0x1F == opcode1)
    {
      /* Handle arm syscall insn.  */
      if (tdep->arm_syscall_record != NULL)
	{
	  regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
	  ret = tdep->arm_syscall_record (reg_cache, u_regval);
	}
      else
	{
	  printf_unfiltered (_("no syscall record support\n"));
	  ret = -1;
	}
    }

  /* B (1), conditional branch is automatically taken care in process_record,
     as PC is saved there.  */

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return ret;
}
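
/* For the SWI/SVC case the side effects depend on the OS ABI, so recording
   is delegated to tdep->arm_syscall_record with the syscall number read
   from r7 (the Linux EABI convention); targets without such a hook simply
   fail the recording.  */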
/* Handling opcode 111 insns.  */

static int
thumb_record_branch (insn_decode_record *thumb_insn_r)
{
  uint32_t record_buf[8];
  uint32_t bits_h = 0;

  bits_h = bits (thumb_insn_r->arm_insn, 11, 12);

  if (2 == bits_h || 3 == bits_h)
    {
      /* BL: LR is modified.  */
      record_buf[0] = ARM_LR_REGNUM;
      thumb_insn_r->reg_rec_count = 1;
    }
  else if (1 == bits_h)
    {
      /* BLX(1): CPSR and LR are modified.  */
      record_buf[0] = ARM_PS_REGNUM;
      record_buf[1] = ARM_LR_REGNUM;
      thumb_insn_r->reg_rec_count = 2;
    }

  /* B(2) is automatically taken care in process_record, as PC is
     saved there.  */

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);

  return 0;
}
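
/* Both halves of the legacy two-halfword Thumb BL sequence modify LR (the
   first halfword stages the upper offset bits there), which is why LR is
   recorded for bits_h values 2 and 3 alike; the BLX(1) form also switches
   instruction-set state, so the CPSR is recorded as well.  */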
12272 /* Handler for thumb2 load/store multiple instructions. */
12275 thumb2_record_ld_st_multiple (insn_decode_record
*thumb2_insn_r
)
12277 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12279 uint32_t reg_rn
, op
;
12280 uint32_t register_bits
= 0, register_count
= 0;
12281 uint32_t index
= 0, start_address
= 0;
12282 uint32_t record_buf
[24], record_buf_mem
[48];
12284 ULONGEST u_regval
= 0;
12286 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12287 op
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
12289 if (0 == op
|| 3 == op
)
12291 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12293 /* Handle RFE instruction. */
12294 record_buf
[0] = ARM_PS_REGNUM
;
12295 thumb2_insn_r
->reg_rec_count
= 1;
12299 /* Handle SRS instruction after reading banked SP. */
12300 return arm_record_unsupported_insn (thumb2_insn_r
);
12303 else if (1 == op
|| 2 == op
)
12305 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12307 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12308 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
12309 while (register_bits
)
12311 if (register_bits
& 0x00000001)
12312 record_buf
[index
++] = register_count
;
12315 register_bits
= register_bits
>> 1;
12317 record_buf
[index
++] = reg_rn
;
12318 record_buf
[index
++] = ARM_PS_REGNUM
;
12319 thumb2_insn_r
->reg_rec_count
= index
;
12323 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12324 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
12325 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12326 while (register_bits
)
12328 if (register_bits
& 0x00000001)
12331 register_bits
= register_bits
>> 1;
12336 /* Start address calculation for LDMDB/LDMEA. */
12337 start_address
= u_regval
;
12341 /* Start address calculation for LDMDB/LDMEA. */
12342 start_address
= u_regval
- register_count
* 4;
12345 thumb2_insn_r
->mem_rec_count
= register_count
;
12346 while (register_count
)
12348 record_buf_mem
[register_count
* 2 - 1] = start_address
;
12349 record_buf_mem
[register_count
* 2 - 2] = 4;
12350 start_address
= start_address
+ 4;
12353 record_buf
[0] = reg_rn
;
12354 record_buf
[1] = ARM_PS_REGNUM
;
12355 thumb2_insn_r
->reg_rec_count
= 2;
12359 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12361 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12363 return ARM_RECORD_SUCCESS
;
12366 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12370 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record
*thumb2_insn_r
)
12372 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12374 uint32_t reg_rd
, reg_rn
, offset_imm
;
12375 uint32_t reg_dest1
, reg_dest2
;
12376 uint32_t address
, offset_addr
;
12377 uint32_t record_buf
[8], record_buf_mem
[8];
12378 uint32_t op1
, op2
, op3
;
12380 ULONGEST u_regval
[2];
12382 op1
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
12383 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 21);
12384 op3
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
12386 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12388 if(!(1 == op1
&& 1 == op2
&& (0 == op3
|| 1 == op3
)))
12390 reg_dest1
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
12391 record_buf
[0] = reg_dest1
;
12392 record_buf
[1] = ARM_PS_REGNUM
;
12393 thumb2_insn_r
->reg_rec_count
= 2;
12396 if (3 == op2
|| (op1
& 2) || (1 == op1
&& 1 == op2
&& 7 == op3
))
12398 reg_dest2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12399 record_buf
[2] = reg_dest2
;
12400 thumb2_insn_r
->reg_rec_count
= 3;
12405 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12406 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
12408 if (0 == op1
&& 0 == op2
)
12410 /* Handle STREX. */
12411 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12412 address
= u_regval
[0] + (offset_imm
* 4);
12413 record_buf_mem
[0] = 4;
12414 record_buf_mem
[1] = address
;
12415 thumb2_insn_r
->mem_rec_count
= 1;
12416 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12417 record_buf
[0] = reg_rd
;
12418 thumb2_insn_r
->reg_rec_count
= 1;
12420 else if (1 == op1
&& 0 == op2
)
12422 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12423 record_buf
[0] = reg_rd
;
12424 thumb2_insn_r
->reg_rec_count
= 1;
12425 address
= u_regval
[0];
12426 record_buf_mem
[1] = address
;
12430 /* Handle STREXB. */
12431 record_buf_mem
[0] = 1;
12432 thumb2_insn_r
->mem_rec_count
= 1;
12436 /* Handle STREXH. */
12437 record_buf_mem
[0] = 2 ;
12438 thumb2_insn_r
->mem_rec_count
= 1;
12442 /* Handle STREXD. */
12443 address
= u_regval
[0];
12444 record_buf_mem
[0] = 4;
12445 record_buf_mem
[2] = 4;
12446 record_buf_mem
[3] = address
+ 4;
12447 thumb2_insn_r
->mem_rec_count
= 2;
12452 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12454 if (bit (thumb2_insn_r
->arm_insn
, 24))
12456 if (bit (thumb2_insn_r
->arm_insn
, 23))
12457 offset_addr
= u_regval
[0] + (offset_imm
* 4);
12459 offset_addr
= u_regval
[0] - (offset_imm
* 4);
12461 address
= offset_addr
;
12464 address
= u_regval
[0];
12466 record_buf_mem
[0] = 4;
12467 record_buf_mem
[1] = address
;
12468 record_buf_mem
[2] = 4;
12469 record_buf_mem
[3] = address
+ 4;
12470 thumb2_insn_r
->mem_rec_count
= 2;
12471 record_buf
[0] = reg_rn
;
12472 thumb2_insn_r
->reg_rec_count
= 1;
12476 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12478 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12480 return ARM_RECORD_SUCCESS
;
/* Handler for thumb2 data processing (shift register and modified immediate)
   instructions.  */

static int
thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
{
  uint32_t reg_rd, op;
  uint32_t record_buf[8];

  op = bits (thumb2_insn_r->arm_insn, 21, 24);
  reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);

  if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
    {
      record_buf[0] = ARM_PS_REGNUM;
      thumb2_insn_r->reg_rec_count = 1;
    }
  else
    {
      record_buf[0] = reg_rd;
      record_buf[1] = ARM_PS_REGNUM;
      thumb2_insn_r->reg_rec_count = 2;
    }

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  return ARM_RECORD_SUCCESS;
}
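
/* When Rd is 15 with opcodes 0, 4, 8 or 13 the insn is really TST, TEQ,
   CMN or CMP, which only update the flags; that is why the first branch
   above records just the CPSR instead of a destination register.  */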
/* Generic handler for thumb2 instructions which effect destination and PS
   registers.  */

static int
thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
{
  uint32_t reg_rd;
  uint32_t record_buf[8];

  reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);

  record_buf[0] = reg_rd;
  record_buf[1] = ARM_PS_REGNUM;
  thumb2_insn_r->reg_rec_count = 2;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  return ARM_RECORD_SUCCESS;
}
/* Handler for thumb2 branch and miscellaneous control instructions.  */

static int
thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
{
  uint32_t op, op1, op2;
  uint32_t record_buf[8];

  op = bits (thumb2_insn_r->arm_insn, 20, 26);
  op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
  op2 = bits (thumb2_insn_r->arm_insn, 8, 11);

  /* Handle MSR insn.  */
  if (!(op1 & 0x2) && 0x38 == op)
    {
      if (!(op2 & 0x3))
	{
	  /* CPSR is going to be changed.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* Writing a banked register or the SPSR is not supported.  */
	  arm_record_unsupported_insn (thumb2_insn_r);
	  return -1;
	}
    }
  else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
    {
      /* BL and BLX: LR and the CPSR change.  */
      record_buf[0] = ARM_PS_REGNUM;
      record_buf[1] = ARM_LR_REGNUM;
      thumb2_insn_r->reg_rec_count = 2;
    }

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  return ARM_RECORD_SUCCESS;
}
12572 /* Handler for thumb2 store single data item instructions. */
12575 thumb2_record_str_single_data (insn_decode_record
*thumb2_insn_r
)
12577 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12579 uint32_t reg_rn
, reg_rm
, offset_imm
, shift_imm
;
12580 uint32_t address
, offset_addr
;
12581 uint32_t record_buf
[8], record_buf_mem
[8];
12584 ULONGEST u_regval
[2];
12586 op1
= bits (thumb2_insn_r
->arm_insn
, 21, 23);
12587 op2
= bits (thumb2_insn_r
->arm_insn
, 6, 11);
12588 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12589 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
12591 if (bit (thumb2_insn_r
->arm_insn
, 23))
12594 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 11);
12595 offset_addr
= u_regval
[0] + offset_imm
;
12596 address
= offset_addr
;
12601 if ((0 == op1
|| 1 == op1
|| 2 == op1
) && !(op2
& 0x20))
12603 /* Handle STRB (register). */
12604 reg_rm
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12605 regcache_raw_read_unsigned (reg_cache
, reg_rm
, &u_regval
[1]);
12606 shift_imm
= bits (thumb2_insn_r
->arm_insn
, 4, 5);
12607 offset_addr
= u_regval
[1] << shift_imm
;
12608 address
= u_regval
[0] + offset_addr
;
12612 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12613 if (bit (thumb2_insn_r
->arm_insn
, 10))
12615 if (bit (thumb2_insn_r
->arm_insn
, 9))
12616 offset_addr
= u_regval
[0] + offset_imm
;
12618 offset_addr
= u_regval
[0] - offset_imm
;
12620 address
= offset_addr
;
12623 address
= u_regval
[0];
12629 /* Store byte instructions. */
12632 record_buf_mem
[0] = 1;
12634 /* Store half word instructions. */
12637 record_buf_mem
[0] = 2;
12639 /* Store word instructions. */
12642 record_buf_mem
[0] = 4;
12646 gdb_assert_not_reached ("no decoding pattern found");
12650 record_buf_mem
[1] = address
;
12651 thumb2_insn_r
->mem_rec_count
= 1;
12652 record_buf
[0] = reg_rn
;
12653 thumb2_insn_r
->reg_rec_count
= 1;
12655 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12657 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12659 return ARM_RECORD_SUCCESS
;
/* Handler for thumb2 load memory hints instructions.  */

static int
thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
{
  uint32_t record_buf[8];
  uint32_t reg_rt, reg_rn;

  reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);

  if (ARM_PC_REGNUM != reg_rt)
    {
      record_buf[0] = reg_rt;
      record_buf[1] = reg_rn;
      record_buf[2] = ARM_PS_REGNUM;
      thumb2_insn_r->reg_rec_count = 3;

      REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
		 record_buf);
      return ARM_RECORD_SUCCESS;
    }

  return ARM_RECORD_FAILURE;
}
/* Handler for thumb2 load word instructions.  */

static int
thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
{
  uint32_t record_buf[8];

  record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
  record_buf[1] = ARM_PS_REGNUM;
  thumb2_insn_r->reg_rec_count = 2;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
             record_buf);
  return ARM_RECORD_SUCCESS;
}
/* Handler for thumb2 long multiply, long multiply accumulate, and
   divide instructions.  */

static int
thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
{
  uint32_t opcode1 = 0, opcode2 = 0;
  uint32_t record_buf[8];

  opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
  opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);

  if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
    {
      /* Handle SMULL, UMULL, SMLAL(S), UMLAL(S).  */
      record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
      record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
      record_buf[2] = ARM_PS_REGNUM;
      thumb2_insn_r->reg_rec_count = 3;
    }
  else if (1 == opcode1 || 3 == opcode2)
    {
      /* Handle SDIV and UDIV.  */
      record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
      record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
      record_buf[2] = ARM_PS_REGNUM;
      thumb2_insn_r->reg_rec_count = 3;
    }
  else
    return ARM_RECORD_FAILURE;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
             record_buf);
  return ARM_RECORD_SUCCESS;
}
/* Record handler for thumb32 coprocessor instructions.  */

static int
thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
{
  if (bit (thumb2_insn_r->arm_insn, 25))
    return arm_record_coproc_data_proc (thumb2_insn_r);
  else
    return arm_record_asimd_vfp_coproc (thumb2_insn_r);
}
/* Record handler for Advanced SIMD structure load/store instructions.  */

static int
thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;
  uint32_t l_bit, a_bit, b_bits;
  uint32_t record_buf[128], record_buf_mem[128];
  uint32_t reg_rn, reg_vd, address, f_ebytes, f_elem;
  uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;

  l_bit = bit (thumb2_insn_r->arm_insn, 21);
  a_bit = bit (thumb2_insn_r->arm_insn, 23);
  b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
  reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
  /* Element size in bytes and number of elements per 64-bit D register;
     e.g. a size field of 0b01 gives f_ebytes = 2 and f_elem = 4.  */
  f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
  f_elem = 8 / f_ebytes;

  if (!l_bit)
    {
      /* Store instructions: record the memory they will write.  */
      ULONGEST u_regval = 0;
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
      address = u_regval;

      if (!a_bit)
        {
          /* Handle VST1 (multiple single elements).  */
          if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
            {
              if (b_bits == 0x07)
                bf_regs = 1;
              else if (b_bits == 0x0a)
                bf_regs = 2;
              else if (b_bits == 0x06)
                bf_regs = 3;
              else if (b_bits == 0x02)
                bf_regs = 4;
              else
                bf_regs = 0;

              for (index_r = 0; index_r < bf_regs; index_r++)
                {
                  for (index_e = 0; index_e < f_elem; index_e++)
                    {
                      record_buf_mem[index_m++] = f_ebytes;
                      record_buf_mem[index_m++] = address;
                      address = address + f_ebytes;
                      thumb2_insn_r->mem_rec_count += 1;
                    }
                }
            }
          /* Handle VST2 (multiple 2-element structures).  */
          else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
            {
              if (b_bits == 0x09 || b_bits == 0x08)
                bf_regs = 1;
              else if (b_bits == 0x03)
                bf_regs = 2;
              else
                bf_regs = 0;

              for (index_r = 0; index_r < bf_regs; index_r++)
                for (index_e = 0; index_e < f_elem; index_e++)
                  {
                    for (loop_t = 0; loop_t < 2; loop_t++)
                      {
                        record_buf_mem[index_m++] = f_ebytes;
                        record_buf_mem[index_m++]
                          = address + (loop_t * f_ebytes);
                        thumb2_insn_r->mem_rec_count += 1;
                      }
                    address = address + (2 * f_ebytes);
                  }
            }
          /* Handle VST3 (multiple 3-element structures).  */
          else if ((b_bits & 0x0e) == 0x04)
            {
              for (index_e = 0; index_e < f_elem; index_e++)
                {
                  for (loop_t = 0; loop_t < 3; loop_t++)
                    {
                      record_buf_mem[index_m++] = f_ebytes;
                      record_buf_mem[index_m++]
                        = address + (loop_t * f_ebytes);
                      thumb2_insn_r->mem_rec_count += 1;
                    }
                  address = address + (3 * f_ebytes);
                }
            }
          /* Handle VST4 (multiple 4-element structures).  */
          else if (!(b_bits & 0x0e))
            {
              for (index_e = 0; index_e < f_elem; index_e++)
                {
                  for (loop_t = 0; loop_t < 4; loop_t++)
                    {
                      record_buf_mem[index_m++] = f_ebytes;
                      record_buf_mem[index_m++]
                        = address + (loop_t * f_ebytes);
                      thumb2_insn_r->mem_rec_count += 1;
                    }
                  address = address + (4 * f_ebytes);
                }
            }
        }
      else
        {
          /* Single-element stores.  */
          uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);

          if (bft_size == 0x00)
            f_ebytes = 1;
          else if (bft_size == 0x01)
            f_ebytes = 2;
          else if (bft_size == 0x02)
            f_ebytes = 4;
          else
            f_ebytes = 0;

          /* Handle VST1.  */
          if (!(b_bits & 0x0b) || b_bits == 0x08)
            thumb2_insn_r->mem_rec_count = 1;
          /* Handle VST2.  */
          else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
            thumb2_insn_r->mem_rec_count = 2;
          /* Handle VST3.  */
          else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
            thumb2_insn_r->mem_rec_count = 3;
          /* Handle VST4.  */
          else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
            thumb2_insn_r->mem_rec_count = 4;

          /* Each record is a {length, address} pair.  */
          for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
            {
              record_buf_mem[index_m * 2] = f_ebytes;
              record_buf_mem[index_m * 2 + 1]
                = address + (index_m * f_ebytes);
            }
        }
    }
  else
    {
      /* Load instructions: record the D registers they will write.  */
      if (!a_bit)
        {
          /* Handle VLD1.  */
          if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
            thumb2_insn_r->reg_rec_count = 1;
          /* Handle VLD2.  */
          else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
            thumb2_insn_r->reg_rec_count = 2;
          /* Handle VLD3.  */
          else if ((b_bits & 0x0e) == 0x04)
            thumb2_insn_r->reg_rec_count = 3;
          /* Handle VLD4.  */
          else if (!(b_bits & 0x0e))
            thumb2_insn_r->reg_rec_count = 4;
        }
      else
        {
          /* Handle VLD1.  */
          if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
            thumb2_insn_r->reg_rec_count = 1;
          /* Handle VLD2.  */
          else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
            thumb2_insn_r->reg_rec_count = 2;
          /* Handle VLD3.  */
          else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
            thumb2_insn_r->reg_rec_count = 3;
          /* Handle VLD4.  */
          else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
            thumb2_insn_r->reg_rec_count = 4;
        }

      for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
        record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
    }

  /* Writeback: the base register changes when Rm is not PC.  */
  if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
    {
      record_buf[index_r] = reg_rn;
      thumb2_insn_r->reg_rec_count += 1;
    }

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
             record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
             record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
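/* Worked example of the dispatch below (using the encoding exercised by
   the self test later in this file): "mrc 15, 0, r7, cr13, cr0, {3}" is
   0xee1d7f70 once decode_insn has swapped the two halfwords.  That gives
   op1 = bits 27-28 = 0x1 and op2 = bits 20-26 = 0x61, so the handler
   falls through to the "op2 & 0x40" arm and routes the instruction to
   thumb2_record_coproc_insn.  (Values computed here for illustration.)  */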
/* Decodes thumb2 instruction type and invokes its record handler.  */

static unsigned int
thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
{
  uint32_t op, op1, op2;

  op = bit (thumb2_insn_r->arm_insn, 15);
  op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 26);

  if (op1 == 0x01)
    {
      if (!(op2 & 0x64))
        {
          /* Load/store multiple instruction.  */
          return thumb2_record_ld_st_multiple (thumb2_insn_r);
        }
      else if ((op2 & 0x64) == 0x4)
        {
          /* Load/store (dual/exclusive) and table branch instruction.  */
          return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
        }
      else if ((op2 & 0x60) == 0x20)
        {
          /* Data-processing (shifted register).  */
          return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
        }
      else if (op2 & 0x40)
        {
          /* Co-processor instructions.  */
          return thumb2_record_coproc_insn (thumb2_insn_r);
        }
    }
  else if (op1 == 0x02)
    {
      if (op)
        {
          /* Branches and miscellaneous control instructions.  */
          return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
        }
      else if (op2 & 0x20)
        {
          /* Data-processing (plain binary immediate) instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else
        {
          /* Data-processing (modified immediate).  */
          return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
        }
    }
  else if (op1 == 0x03)
    {
      if (!(op2 & 0x71))
        {
          /* Store single data item.  */
          return thumb2_record_str_single_data (thumb2_insn_r);
        }
      else if (!((op2 & 0x71) ^ 0x10))
        {
          /* Advanced SIMD or structure load/store instructions.  */
          return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x01))
        {
          /* Load byte, memory hints instruction.  */
          return thumb2_record_ld_mem_hints (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x03))
        {
          /* Load halfword, memory hints instruction.  */
          return thumb2_record_ld_mem_hints (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x05))
        {
          /* Load word instruction.  */
          return thumb2_record_ld_word (thumb2_insn_r);
        }
      else if (!((op2 & 0x70) ^ 0x20))
        {
          /* Data-processing (register) instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else if (!((op2 & 0x78) ^ 0x30))
        {
          /* Multiply, multiply accumulate, abs diff instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else if (!((op2 & 0x78) ^ 0x38))
        {
          /* Long multiply, long multiply accumulate, and divide.  */
          return thumb2_record_lmul_lmla_div (thumb2_insn_r);
        }
      else if (op2 & 0x40)
        {
          /* Co-processor instructions.  */
          return thumb2_record_coproc_insn (thumb2_insn_r);
        }
    }

  return -1;
}
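/* The memory-reader abstraction below exists so that the decode path can
   be exercised without a live inferior: the real-target reader simply
   wraps target_read_memory, while the self tests further down supply
   instruction_reader_thumb, which serves canned halfwords from a static
   array instead.  */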
/* Abstract memory reader.  */

class abstract_memory_reader
{
public:
  /* Read LEN bytes of target memory at address MEMADDR, placing the
     results in GDB's memory at BUF.  Return true on success.  */

  virtual bool read (CORE_ADDR memaddr, gdb_byte *buf,
                     const size_t len) = 0;
};

/* Instruction reader from real target.  */

class instruction_reader : public abstract_memory_reader
{
public:
  bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
  {
    if (target_read_memory (memaddr, buf, len))
      return false;
    else
      return true;
  }
};
/* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on
   success and a positive value on failure.  */

static int
extract_arm_insn (abstract_memory_reader& reader,
                  insn_decode_record *insn_record, uint32_t insn_size)
{
  gdb_byte buf[insn_size];

  memset (&buf[0], 0, insn_size);
  if (!reader.read (insn_record->this_addr, buf, insn_size))
    return 1;
  insn_record->arm_insn
    = (uint32_t) extract_unsigned_integer (&buf[0], insn_size,
          gdbarch_byte_order_for_code (insn_record->gdbarch));
  return 0;
}
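/* Note on halfword ordering: extract_arm_insn reads INSN_SIZE bytes using
   the code byte order, so for a 32-bit Thumb-2 instruction the first
   halfword ends up in the low 16 bits of arm_insn.  decode_insn therefore
   swaps the two halves before calling thumb2_record_decode_insn_handler,
   so that bit numbering matches the usual 32-bit encoding diagrams.  */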
typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);

/* Decode arm/thumb insn depending on condition codes and opcodes, and
   dispatch it to the matching record handler.  */

static int
decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
             record_type_t record_type, uint32_t insn_size)
{

  /* (Starting from numerical 0); bits 25, 26, 27 decode the type of arm
     instruction.  */
  static const sti_arm_hdl_fp_t arm_handle_insn[8] =
  {
    arm_record_data_proc_misc_ld_str,   /* 000.  */
    arm_record_data_proc_imm,           /* 001.  */
    arm_record_ld_st_imm_offset,        /* 010.  */
    arm_record_ld_st_reg_offset,        /* 011.  */
    arm_record_ld_st_multiple,          /* 100.  */
    arm_record_b_bl,                    /* 101.  */
    arm_record_asimd_vfp_coproc,        /* 110.  */
    arm_record_coproc_data_proc         /* 111.  */
  };

  /* (Starting from numerical 0); bits 13, 14, 15 decode the type of thumb
     instruction.  */
  static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
  {
    thumb_record_shift_add_sub,         /* 000.  */
    thumb_record_add_sub_cmp_mov,       /* 001.  */
    thumb_record_ld_st_reg_offset,      /* 010.  */
    thumb_record_ld_st_imm_offset,      /* 011.  */
    thumb_record_ld_st_stack,           /* 100.  */
    thumb_record_misc,                  /* 101.  */
    thumb_record_ldm_stm_swi,           /* 110.  */
    thumb_record_branch                 /* 111.  */
  };

  uint32_t ret = 0;    /* return value: negative:failure   0:success.  */
  uint32_t insn_id = 0;

  if (extract_arm_insn (reader, arm_record, insn_size))
    {
      if (record_debug)
        {
          printf_unfiltered (_("Process record: error reading memory at "
                               "addr %s len = %d.\n"),
                             paddress (arm_record->gdbarch,
                                       arm_record->this_addr), insn_size);
        }
      return -1;
    }
  else if (ARM_RECORD == record_type)
    {
      arm_record->cond = bits (arm_record->arm_insn, 28, 31);
      insn_id = bits (arm_record->arm_insn, 25, 27);

      if (arm_record->cond == 0xf)
        ret = arm_record_extension_space (arm_record);
      else
        {
          /* If this insn has fallen into extension space
             then we need not decode it anymore.  */
          ret = arm_handle_insn[insn_id] (arm_record);
        }
      if (ret != ARM_RECORD_SUCCESS)
        arm_record_unsupported_insn (arm_record);
    }
  else if (THUMB_RECORD == record_type)
    {
      /* As thumb does not have condition codes, we set negative.  */
      arm_record->cond = -1;
      insn_id = bits (arm_record->arm_insn, 13, 15);
      ret = thumb_handle_insn[insn_id] (arm_record);
      if (ret != ARM_RECORD_SUCCESS)
        arm_record_unsupported_insn (arm_record);
    }
  else if (THUMB2_RECORD == record_type)
    {
      /* As thumb does not have condition codes, we set negative.  */
      arm_record->cond = -1;

      /* Swap first half of 32bit thumb instruction with second half.  */
      arm_record->arm_insn
        = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);

      ret = thumb2_record_decode_insn_handler (arm_record);

      if (ret != ARM_RECORD_SUCCESS)
        arm_record_unsupported_insn (arm_record);
    }
  else
    {
      /* Throw assertion.  */
      gdb_assert_not_reached ("not a valid instruction, could not decode");
    }

  return ret;
}
#if GDB_SELF_TEST
namespace selftests {

/* Provide both 16-bit and 32-bit thumb instructions.  */

class instruction_reader_thumb : public abstract_memory_reader
{
public:
  template<size_t SIZE>
  instruction_reader_thumb (enum bfd_endian endian,
                            const uint16_t (&insns)[SIZE])
    : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
  {}

  bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
  {
    SELF_CHECK (len == 4 || len == 2);
    SELF_CHECK (memaddr % 2 == 0);
    SELF_CHECK ((memaddr / 2) < m_insns_size);

    store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
    if (len == 4)
      {
        store_unsigned_integer (&buf[2], 2, m_endian,
                                m_insns[memaddr / 2 + 1]);
      }
    return true;
  }

private:
  enum bfd_endian m_endian;
  const uint16_t *m_insns;
  size_t m_insns_size;
};

static void
arm_record_test (void)
{
  struct gdbarch_info info;
  gdbarch_info_init (&info);
  info.bfd_arch_info = bfd_scan_arch ("arm");

  struct gdbarch *gdbarch = gdbarch_find_by_info (info);

  SELF_CHECK (gdbarch != NULL);

  /* 16-bit Thumb instructions.  */
  {
    insn_decode_record arm_record;

    memset (&arm_record, 0, sizeof (insn_decode_record));
    arm_record.gdbarch = gdbarch;

    static const uint16_t insns[] = {
      /* db b2	uxtb	r3, r3 */
      0xb2db,
      /* cd 58	ldr	r5, [r1, r3] */
      0x58cd,
    };

    enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
    instruction_reader_thumb reader (endian, insns);
    int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
                           THUMB_INSN_SIZE_BYTES);

    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 3);

    arm_record.this_addr += 2;
    ret = decode_insn (reader, &arm_record, THUMB_RECORD,
                       THUMB_INSN_SIZE_BYTES);

    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 5);
  }

  /* 32-bit Thumb-2 instructions.  */
  {
    insn_decode_record arm_record;

    memset (&arm_record, 0, sizeof (insn_decode_record));
    arm_record.gdbarch = gdbarch;

    static const uint16_t insns[] = {
      /* 1d ee 70 7f	mrc	15, 0, r7, cr13, cr0, {3} */
      0xee1d, 0x7f70,
    };

    enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
    instruction_reader_thumb reader (endian, insns);
    int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
                           THUMB2_INSN_SIZE_BYTES);

    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 7);
  }
}

} // namespace selftests
#endif /* GDB_SELF_TEST */
/* Cleans up local record registers and memory allocations.  */

static void
deallocate_reg_mem (insn_decode_record *record)
{
  xfree (record->arm_regs);
  xfree (record->arm_mems);
}
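/* arm_process_record below is the per-instruction entry point of the
   recorder; it is assumed to be installed as the gdbarch process_record
   hook elsewhere in this file, and to be called by the full record target
   for each instruction that is about to be executed.  */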
/* Parse the current instruction and record the values of the registers and
   memory that will be changed by the current instruction to
   "record_arch_list".  Return -1 if something is wrong.  */

int
arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
                    CORE_ADDR insn_addr)
{

  uint32_t no_of_rec = 0;
  uint32_t ret = 0;  /* return value: -1:record failure ;  0:success  */
  ULONGEST t_bit = 0, insn_id = 0;

  ULONGEST u_regval = 0;

  insn_decode_record arm_record;

  memset (&arm_record, 0, sizeof (insn_decode_record));
  arm_record.regcache = regcache;
  arm_record.this_addr = insn_addr;
  arm_record.gdbarch = gdbarch;

  if (record_debug > 1)
    {
      fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
                          "addr = %s\n",
                          paddress (gdbarch, arm_record.this_addr));
    }

  instruction_reader reader;
  if (extract_arm_insn (reader, &arm_record, 2))
    {
      if (record_debug)
        {
          printf_unfiltered (_("Process record: error reading memory at "
                               "addr %s len = %d.\n"),
                             paddress (arm_record.gdbarch,
                                       arm_record.this_addr), 2);
        }
      return -1;
    }

  /* Check the insn, whether it is thumb or arm one.  */

  t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
  regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);

  if (!(u_regval & t_bit))
    {
      /* We are decoding arm insn.  */
      ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
    }
  else
    {
      insn_id = bits (arm_record.arm_insn, 11, 15);
      /* Is it a thumb2 insn?  */
      if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
        {
          ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
                             THUMB2_INSN_SIZE_BYTES);
        }
      else
        {
          /* We are decoding thumb insn.  */
          ret = decode_insn (reader, &arm_record, THUMB_RECORD,
                             THUMB_INSN_SIZE_BYTES);
        }
    }

  if (0 == ret)
    {
      /* Record registers.  */
      record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
      if (arm_record.arm_regs)
        {
          for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
            {
              if (record_full_arch_list_add_reg
                  (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
                ret = -1;
            }
        }
      /* Record memories.  */
      if (arm_record.arm_mems)
        {
          for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
            {
              if (record_full_arch_list_add_mem
                  ((CORE_ADDR) arm_record.arm_mems[no_of_rec].addr,
                   arm_record.arm_mems[no_of_rec].len))
                ret = -1;
            }
        }
      if (record_full_arch_list_add_end ())
        ret = -1;
    }

  deallocate_reg_mem (&arm_record);