/* -*- mode: C; c-basic-offset: 3; -*- */

/*--------------------------------------------------------------------*/
/*--- MemCheck: some non-generic asm implementations of mc_main.c  ---*/
/*--- functions                                      mc_main_asm.c ---*/
/*--------------------------------------------------------------------*/

/*
   This file is part of MemCheck, a heavyweight Valgrind tool for
   detecting memory errors.

   Copyright (C) 2000-2018 Julian Seward
   jseward@acm.org

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, see <http://www.gnu.org/licenses/>.

   The GNU General Public License is contained in the file COPYING.
*/

/* Having these in mc_main.c gives undefined references at link time,
   when compiling with lto. Having them in a separate file solves this.
   Also, for some toolchains, we might need to disable lto. */

// A bunch of includes only needed for mc_include.h
#include "pub_tool_basics.h"
#include "pub_tool_poolalloc.h"
#include "pub_tool_hashtable.h"
#include "pub_tool_tooliface.h"

#include "mc_include.h"
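
// (ENABLE_ASSEMBLY_HELPERS and PERF_FAST_LOADV, tested in the
// conditionals below, are configuration macros expected to come in
// via mc_include.h.)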

// Non-generic assembly for arm32-linux
#if ENABLE_ASSEMBLY_HELPERS && defined(PERF_FAST_LOADV) \
    && defined(VGP_arm_linux)
__asm__( /* Derived from the 32 bit assembly helper */
".text                                  \n"
".align 2                               \n"
".global vgMemCheck_helperc_LOADV64le   \n"
".type   vgMemCheck_helperc_LOADV64le, %function \n"
"vgMemCheck_helperc_LOADV64le:          \n"
"      tst    r0, #7                    \n"
"      movw   r3, #:lower16:primary_map \n"
"      bne    .LLV64LEc4                \n" // if misaligned
"      lsr    r2, r0, #16               \n"
"      movt   r3, #:upper16:primary_map \n"
58 " ldr r2, [r3, r2, lsl #2] \n"
59 " uxth r1, r0 \n" // r1 is 0-(16)-0 X-(13)-X 000
60 " movw r3, #0xAAAA \n"
61 " lsr r1, r1, #2 \n" // r1 is 0-(16)-0 00 X-(13)-X 0
62 " ldrh r1, [r2, r1] \n"
63 " cmp r1, r3 \n" // 0xAAAA == VA_BITS16_DEFINED
64 " bne .LLV64LEc0 \n" // if !all_defined
65 " mov r1, #0x0 \n" // 0x0 == V_BITS32_DEFINED
66 " mov r0, #0x0 \n" // 0x0 == V_BITS32_DEFINED
69 " movw r3, #0x5555 \n"
70 " cmp r1, r3 \n" // 0x5555 == VA_BITS16_UNDEFINED
71 " bne .LLV64LEc4 \n" // if !all_undefined
72 " mov r1, #0xFFFFFFFF \n" // 0xFFFFFFFF == V_BITS32_UNDEFINED
73 " mov r0, #0xFFFFFFFF \n" // 0xFFFFFFFF == V_BITS32_UNDEFINED
79 " bl mc_LOADVn_slow \n"
81 ".size vgMemCheck_helperc_LOADV64le, .-vgMemCheck_helperc_LOADV64le \n"

#elif ENABLE_ASSEMBLY_HELPERS && defined(PERF_FAST_LOADV) \
      && (defined(VGP_x86_linux) || defined(VGP_x86_solaris) || defined(VGP_x86_freebsd))
90 ".global vgMemCheck_helperc_LOADV64le\n"
91 ".type vgMemCheck_helperc_LOADV64le, @function\n"
92 "vgMemCheck_helperc_LOADV64le:\n"
94 " jne .LLV64LE2\n" /* jump if not aligned */
98 " mov primary_map(,%ecx,4), %ecx\n"
100 " movzwl (%ecx,%edx,2), %edx\n"
101 " cmp $0xaaaa, %edx\n"
102 " jne .LLV64LE1\n" /* jump if not all defined */
103 " xor %eax, %eax\n" /* return 0 in edx:eax */
107 " cmp $0x5555, %edx\n"
108 " jne .LLV64LE2\n" /* jump if not all undefined */
109 " or $0xffffffff, %eax\n" /* else return all bits set in edx:eax */
110 " or $0xffffffff, %edx\n"
113 " xor %ecx, %ecx\n" /* tail call to mc_LOADVn_slow(a, 64, 0) */
115 " jmp mc_LOADVn_slow\n"
116 ".size vgMemCheck_helperc_LOADV64le, .-vgMemCheck_helperc_LOADV64le\n"

#else
// Generic for all platforms except {arm32,x86}-linux and x86-solaris
// is in mc_main.c
#endif

// Non-generic assembly for arm32-linux
#if ENABLE_ASSEMBLY_HELPERS && defined(PERF_FAST_LOADV) \
    && defined(VGP_arm_linux)
__asm__( /* Derived from NCode template */
".text                                  \n"
".align 2                               \n"
".global vgMemCheck_helperc_LOADV32le   \n"
".type   vgMemCheck_helperc_LOADV32le, %function \n"
"vgMemCheck_helperc_LOADV32le:          \n"
135 " tst r0, #3 \n" // 1
136 " movw r3, #:lower16:primary_map \n" // 1
137 " bne .LLV32LEc4 \n" // 2 if misaligned
138 " lsr r2, r0, #16 \n" // 3
139 " movt r3, #:upper16:primary_map \n" // 3
140 " ldr r2, [r3, r2, lsl #2] \n" // 4
141 " uxth r1, r0 \n" // 4
142 " ldrb r1, [r2, r1, lsr #2] \n" // 5
143 " cmp r1, #0xAA \n" // 6 0xAA == VA_BITS8_DEFINED
144 " bne .LLV32LEc0 \n" // 7 if !all_defined
145 " mov r0, #0x0 \n" // 8 0x0 == V_BITS32_DEFINED
148 " cmp r1, #0x55 \n" // 0x55 == VA_BITS8_UNDEFINED
149 " bne .LLV32LEc4 \n" // if !all_undefined
150 " mov r0, #0xFFFFFFFF \n" // 0xFFFFFFFF == V_BITS32_UNDEFINED
156 " bl mc_LOADVn_slow \n"
158 ".size vgMemCheck_helperc_LOADV32le, .-vgMemCheck_helperc_LOADV32le \n"

#elif ENABLE_ASSEMBLY_HELPERS && defined(PERF_FAST_LOADV) \
      && (defined(VGP_x86_linux) || defined(VGP_x86_solaris))
167 ".global vgMemCheck_helperc_LOADV32le\n"
168 ".type vgMemCheck_helperc_LOADV32le, @function\n"
169 "vgMemCheck_helperc_LOADV32le:\n"
171 " jnz .LLV32LE2\n" /* jump if misaligned */
174 " mov primary_map(,%edx,4), %ecx\n"
175 " movzwl %ax, %edx\n"
177 " movzbl (%ecx,%edx,1), %edx\n"
178 " cmp $0xaa, %edx\n" /* compare to VA_BITS8_DEFINED */
179 " jne .LLV32LE1\n" /* jump if not completely defined */
180 " xor %eax, %eax\n" /* else return V_BITS32_DEFINED */
183 " cmp $0x55, %edx\n" /* compare to VA_BITS8_UNDEFINED */
184 " jne .LLV32LE2\n" /* jump if not completely undefined */
185 " or $0xffffffff, %eax\n" /* else return V_BITS32_UNDEFINED */
188 " xor %ecx, %ecx\n" /* tail call mc_LOADVn_slow(a, 32, 0) */
190 " jmp mc_LOADVn_slow\n"
191 ".size vgMemCheck_helperc_LOADV32le, .-vgMemCheck_helperc_LOADV32le\n"

#else
// Generic for all platforms except {arm32,x86}-linux and x86-solaris
// is in mc_main.c
#endif

/*--------------------------------------------------------------------*/
/*--- end                                            mc_main_asm.c ---*/
/*--------------------------------------------------------------------*/