2 ----------------------------------------------------------------
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
10 ----------------------------------------------------------------
12 This file is part of Valgrind, a dynamic binary instrumentation framework.
15 Copyright (C) 2000-2017 Julian Seward. All rights reserved.
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions are met:
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written permission.
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
48 ----------------------------------------------------------------
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
55 ----------------------------------------------------------------
59 /* This file is for inclusion into client (your!) code.
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on Valgrind, each client request
67 consumes very few (e.g. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
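/* A minimal usage sketch (assumptions for illustration only: the
   RUNNING_ON_VALGRIND macro is defined later in this header, and the
   instruction count above is indicative, not guaranteed):

      #include <stdio.h>
      #include "valgrind.h"

      int main ( void )
      {
         if (RUNNING_ON_VALGRIND)
            printf("running under Valgrind\n");
         else
            printf("running natively\n");
         return 0;
      }

   Building the same file with "gcc -DNVALGRIND prog.c" removes the
   client request instruction sequences entirely; each request then
   simply yields its default value. */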
77 /* ------------------------------------------------------------------ */
78 /* VERSION NUMBER OF VALGRIND */
79 /* ------------------------------------------------------------------ */
81 /* Specify Valgrind's version number, so that user code can
82 conditionally compile based on our version number. Note that these
83 were introduced at version 3.6 and so do not exist in version 3.5
84 or earlier. The recommended way to use them to check for "version X.Y or later" is (for example)
87 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88 && (__VALGRIND_MAJOR__ > 3 \
89 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
91 #define __VALGRIND_MAJOR__ @VG_VER_MAJOR@
92 #define __VALGRIND_MINOR__ @VG_VER_MINOR@
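/* For example (a sketch), client code that must also build against
   Valgrind 3.5 or earlier can guard use of newer requests like this:

      #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
          && (__VALGRIND_MAJOR__ > 3 \
              || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
         ... use requests introduced after 3.5 ...
      #else
         ... fall back for 3.5-or-earlier, or for no Valgrind headers ...
      #endif
*/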
97 /* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead we use "__asm__"). */
101 /* Derive some tags indicating what the target platform is. Note
102 that in this file we're using the compiler's CPP symbols for
103 identifying architectures, which are different to the ones we use
104 within the rest of Valgrind. Note, __powerpc__ is active for both
105 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106 latter (on Linux, that is).
108 Misc note: how to find out what's predefined in gcc by default:
109 gcc -Wp,-dM somefile.c
111 #undef PLAT_x86_darwin
112 #undef PLAT_amd64_darwin
113 #undef PLAT_x86_freebsd
114 #undef PLAT_amd64_freebsd
115 #undef PLAT_arm64_freebsd
116 #undef PLAT_x86_win32
117 #undef PLAT_amd64_win64
118 #undef PLAT_x86_linux
119 #undef PLAT_amd64_linux
120 #undef PLAT_ppc32_linux
121 #undef PLAT_ppc64be_linux
122 #undef PLAT_ppc64le_linux
123 #undef PLAT_arm_linux
124 #undef PLAT_arm64_linux
125 #undef PLAT_s390x_linux
126 #undef PLAT_mips32_linux
127 #undef PLAT_mips64_linux
128 #undef PLAT_nanomips_linux
129 #undef PLAT_x86_solaris
130 #undef PLAT_amd64_solaris
133 #if defined(__APPLE__) && defined(__i386__)
134 # define PLAT_x86_darwin 1
135 #elif defined(__APPLE__) && defined(__x86_64__)
136 # define PLAT_amd64_darwin 1
137 #elif defined(__FreeBSD__) && defined(__i386__)
138 # define PLAT_x86_freebsd 1
139 #elif defined(__FreeBSD__) && defined(__amd64__)
140 # define PLAT_amd64_freebsd 1
141 #elif defined(__FreeBSD__) && defined(__aarch64__) && !defined(__arm__)
142 # define PLAT_arm64_freebsd 1
143 #elif (defined(__MINGW32__) && defined(__i386__)) \
144 || defined(__CYGWIN32__) \
145 || (defined(_WIN32) && defined(_M_IX86))
146 # define PLAT_x86_win32 1
147 #elif (defined(__MINGW32__) && defined(__x86_64__)) \
148 || (defined(_WIN32) && defined(_M_X64))
149 /* __MINGW32__ and _WIN32 are defined in 64 bit mode as well. */
150 # define PLAT_amd64_win64 1
151 #elif defined(__linux__) && defined(__i386__)
152 # define PLAT_x86_linux 1
153 #elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
154 # define PLAT_amd64_linux 1
155 #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
156 # define PLAT_ppc32_linux 1
157 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
158 /* Big Endian uses ELF version 1 */
159 # define PLAT_ppc64be_linux 1
160 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
161 /* Little Endian uses ELF version 2 */
162 # define PLAT_ppc64le_linux 1
163 #elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
164 # define PLAT_arm_linux 1
165 #elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
166 # define PLAT_arm64_linux 1
167 #elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
168 # define PLAT_s390x_linux 1
169 #elif defined(__linux__) && defined(__mips__) && (__mips==64)
170 # define PLAT_mips64_linux 1
171 #elif defined(__linux__) && defined(__mips__) && (__mips==32)
172 # define PLAT_mips32_linux 1
173 #elif defined(__linux__) && defined(__nanomips__)
174 # define PLAT_nanomips_linux 1
175 #elif defined(__sun) && defined(__i386__)
176 # define PLAT_x86_solaris 1
177 #elif defined(__sun) && defined(__x86_64__)
178 # define PLAT_amd64_solaris 1
180 /* If we're not compiling for our target platform, don't generate any inline asms. */
182 # if !defined(NVALGRIND)
188 /* ------------------------------------------------------------------ */
189 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
190 /* in here of use to end-users -- skip to the next section. */
191 /* ------------------------------------------------------------------ */
194 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
195 * request. Accepts both pointers and integers as arguments.
197 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
198 * client request that does not return a value.
200 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
201 * client request and whose value equals the client request result. Accepts
202 * both pointers and integers as arguments. Note that such calls are not
203 * necessarily pure functions -- they may have side effects.
206 #define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, \
207 _zzq_request, _zzq_arg1, _zzq_arg2, \
208 _zzq_arg3, _zzq_arg4, _zzq_arg5) \
209 do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default), \
210 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
211 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
213 #define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1, \
214 _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
215 do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
216 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
217 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
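/* Illustrative sketch only: tool headers (memcheck.h, helgrind.h, etc)
   normally wrap these macros for you.  Assuming a hypothetical request
   code MY_TOOL__EXAMPLE_REQ and arguments ptr/len, issuing a request
   directly looks like this:

      unsigned long _res =
         VALGRIND_DO_CLIENT_REQUEST_EXPR(0, MY_TOOL__EXAMPLE_REQ,
                                         ptr, len, 0, 0, 0);

   The first argument (0 here) is the value _res takes when the program
   is not running under Valgrind.  VALGRIND_DO_CLIENT_REQUEST_STMT is
   the same but discards the result. */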
219 #if defined(NVALGRIND)
221 /* Define NVALGRIND to completely remove the Valgrind magic sequence
222 from the compiled code (analogous to NDEBUG's effects on assert()). */
224 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
225 _zzq_default, _zzq_request, \
226 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
227 (_zzq_default)
229 #else /* ! NVALGRIND */
231 /* The following defines the magic code sequences which the JITter
232 spots and handles magically. Don't look too closely at them as
233 they will rot your brain.
235 The assembly code sequences for all architectures are in this one
236 file. This is because this file must be stand-alone, and we don't
237 want to have multiple files.
239 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
240 value gets put in the return slot, so that everything works when
241 this is executed not under Valgrind. Args are passed in a memory
242 block, and so there's no intrinsic limit to the number that could
243 be passed, but it's currently five.
246 _zzq_rlval result lvalue
247 _zzq_default default value (result returned when running on real CPU)
248 _zzq_request request code
249 _zzq_arg1..5 request params
251 The other two macros are used to support function wrapping, and are
252 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
253 guest's NRADDR pseudo-register and whatever other information is
254 needed to safely call the original from the wrapper: on
255 ppc64-linux, the R2 value at the divert point is also needed. This
256 information is abstracted into a user-visible type, OrigFn.
258 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
259 guest, but guarantees that the branch instruction will not be
260 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
261 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
262 complete inline asm, since it needs to be combined with more magic
263 inline asm stuff to be useful.
266 /* ----------------- x86-{linux,darwin,solaris} ---------------- */
268 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
269 || (defined(PLAT_x86_win32) && defined(__GNUC__)) \
270 || defined(PLAT_x86_solaris) || defined(PLAT_x86_freebsd)
274 unsigned int nraddr; /* where's the code? */
278 #define __SPECIAL_INSTRUCTION_PREAMBLE \
279 "roll $3, %%edi ; roll $13, %%edi\n\t" \
280 "roll $29, %%edi ; roll $19, %%edi\n\t"
282 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
283 _zzq_default, _zzq_request, \
284 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
286 ({volatile unsigned int _zzq_args[6]; \
287 volatile unsigned int _zzq_result; \
288 _zzq_args[0] = (unsigned int)(_zzq_request); \
289 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
290 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
291 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
292 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
293 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
294 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
295 /* %EDX = client_request ( %EAX ) */ \
296 "xchgl %%ebx,%%ebx" \
297 : "=d" (_zzq_result) \
298 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
304 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
305 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
306 volatile unsigned int __addr; \
307 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
308 /* %EAX = guest_NRADDR */ \
309 "xchgl %%ecx,%%ecx" \
314 _zzq_orig->nraddr = __addr; \
317 #define VALGRIND_CALL_NOREDIR_EAX \
318 __SPECIAL_INSTRUCTION_PREAMBLE \
319 /* call-noredir *%EAX */ \
320 "xchgl %%edx,%%edx\n\t"
322 #define VALGRIND_VEX_INJECT_IR() \
324 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
325 "xchgl %%edi,%%edi\n\t" \
326 : : : "cc", "memory" \
330 #endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
331 || PLAT_x86_solaris || PLAT_x86_freebsd */
333 /* ------------------------- x86-Win32 ------------------------- */
335 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
339 unsigned int nraddr; /* where's the code? */
343 #if defined(_MSC_VER)
345 #define __SPECIAL_INSTRUCTION_PREAMBLE \
346 __asm rol edi, 3 __asm rol edi, 13 \
347 __asm rol edi, 29 __asm rol edi, 19
349 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
350 _zzq_default, _zzq_request, \
351 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
352 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
353 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
354 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
355 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
357 static __inline uintptr_t
358 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
359 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
360 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
361 uintptr_t _zzq_arg5)
362 {
363 volatile uintptr_t _zzq_args[6];
364 volatile unsigned int _zzq_result;
365 _zzq_args[0] = (uintptr_t)(_zzq_request);
366 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
367 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
368 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
369 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
370 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
371 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
372 __SPECIAL_INSTRUCTION_PREAMBLE
373 /* %EDX = client_request ( %EAX ) */
374 __asm xchg ebx,ebx
375 __asm mov _zzq_result, edx
376 }
377 return _zzq_result;
378 }
380 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
381 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
382 volatile unsigned int __addr; \
383 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
384 /* %EAX = guest_NRADDR */ \
386 __asm mov __addr, eax \
388 _zzq_orig->nraddr = __addr; \
391 #define VALGRIND_CALL_NOREDIR_EAX ERROR
393 #define VALGRIND_VEX_INJECT_IR() \
395 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
401 #error Unsupported compiler.
404 #endif /* PLAT_x86_win32 */
406 /* ----------------- amd64-{linux,darwin,solaris} --------------- */
408 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
409 || defined(PLAT_amd64_solaris) \
410 || defined(PLAT_amd64_freebsd) \
411 || (defined(PLAT_amd64_win64) && defined(__GNUC__))
415 unsigned long int nraddr; /* where's the code? */
419 #define __SPECIAL_INSTRUCTION_PREAMBLE \
420 "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
421 "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
423 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
424 _zzq_default, _zzq_request, \
425 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
427 ({ volatile unsigned long int _zzq_args[6]; \
428 volatile unsigned long int _zzq_result; \
429 _zzq_args[0] = (unsigned long int)(_zzq_request); \
430 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
431 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
432 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
433 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
434 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
435 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
436 /* %RDX = client_request ( %RAX ) */ \
437 "xchgq %%rbx,%%rbx" \
438 : "=d" (_zzq_result) \
439 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
445 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
446 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
447 volatile unsigned long int __addr; \
448 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
449 /* %RAX = guest_NRADDR */ \
450 "xchgq %%rcx,%%rcx" \
455 _zzq_orig->nraddr = __addr; \
458 #define VALGRIND_CALL_NOREDIR_RAX \
459 __SPECIAL_INSTRUCTION_PREAMBLE \
460 /* call-noredir *%RAX */ \
461 "xchgq %%rdx,%%rdx\n\t"
463 #define VALGRIND_VEX_INJECT_IR() \
465 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
466 "xchgq %%rdi,%%rdi\n\t" \
467 : : : "cc", "memory" \
471 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris || PLAT_amd64_freebsd || (PLAT_amd64_win64 && __GNUC__) */
473 /* ------------------------- amd64-Win64 ------------------------- */
475 #if defined(PLAT_amd64_win64) && !defined(__GNUC__)
477 #error Unsupported compiler.
479 #endif /* PLAT_amd64_win64 */
481 /* ------------------------ ppc32-linux ------------------------ */
483 #if defined(PLAT_ppc32_linux)
487 unsigned int nraddr; /* where's the code? */
491 #define __SPECIAL_INSTRUCTION_PREAMBLE \
492 "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
493 "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"
495 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
496 _zzq_default, _zzq_request, \
497 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
500 ({ unsigned int _zzq_args[6]; \
501 unsigned int _zzq_result; \
502 unsigned int* _zzq_ptr; \
503 _zzq_args[0] = (unsigned int)(_zzq_request); \
504 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
505 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
506 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
507 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
508 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
509 _zzq_ptr = _zzq_args; \
510 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
511 "mr 4,%2\n\t" /*ptr*/ \
512 __SPECIAL_INSTRUCTION_PREAMBLE \
513 /* %R3 = client_request ( %R4 ) */ \
515 "mr %0,3" /*result*/ \
516 : "=b" (_zzq_result) \
517 : "b" (_zzq_default), "b" (_zzq_ptr) \
518 : "cc", "memory", "r3", "r4"); \
522 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
523 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
524 unsigned int __addr; \
525 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
526 /* %R3 = guest_NRADDR */ \
531 : "cc", "memory", "r3" \
533 _zzq_orig->nraddr = __addr; \
536 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
537 __SPECIAL_INSTRUCTION_PREAMBLE \
538 /* branch-and-link-to-noredir *%R11 */ \
541 #define VALGRIND_VEX_INJECT_IR() \
543 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
548 #endif /* PLAT_ppc32_linux */
550 /* ------------------------ ppc64-linux ------------------------ */
552 #if defined(PLAT_ppc64be_linux)
556 unsigned long int nraddr; /* where's the code? */
557 unsigned long int r2; /* what tocptr do we need? */
561 #define __SPECIAL_INSTRUCTION_PREAMBLE \
562 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
563 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
565 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
566 _zzq_default, _zzq_request, \
567 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
570 ({ unsigned long int _zzq_args[6]; \
571 unsigned long int _zzq_result; \
572 unsigned long int* _zzq_ptr; \
573 _zzq_args[0] = (unsigned long int)(_zzq_request); \
574 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
575 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
576 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
577 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
578 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
579 _zzq_ptr = _zzq_args; \
580 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
581 "mr 4,%2\n\t" /*ptr*/ \
582 __SPECIAL_INSTRUCTION_PREAMBLE \
583 /* %R3 = client_request ( %R4 ) */ \
585 "mr %0,3" /*result*/ \
586 : "=b" (_zzq_result) \
587 : "b" (_zzq_default), "b" (_zzq_ptr) \
588 : "cc", "memory", "r3", "r4"); \
592 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
593 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
594 unsigned long int __addr; \
595 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
596 /* %R3 = guest_NRADDR */ \
601 : "cc", "memory", "r3" \
603 _zzq_orig->nraddr = __addr; \
604 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
605 /* %R3 = guest_NRADDR_GPR2 */ \
610 : "cc", "memory", "r3" \
612 _zzq_orig->r2 = __addr; \
615 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
616 __SPECIAL_INSTRUCTION_PREAMBLE \
617 /* branch-and-link-to-noredir *%R11 */ \
620 #define VALGRIND_VEX_INJECT_IR() \
622 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
627 #endif /* PLAT_ppc64be_linux */
629 #if defined(PLAT_ppc64le_linux)
633 unsigned long int nraddr; /* where's the code? */
634 unsigned long int r2; /* what tocptr do we need? */
638 #define __SPECIAL_INSTRUCTION_PREAMBLE \
639 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
640 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
642 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
643 _zzq_default, _zzq_request, \
644 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
647 ({ unsigned long int _zzq_args[6]; \
648 unsigned long int _zzq_result; \
649 unsigned long int* _zzq_ptr; \
650 _zzq_args[0] = (unsigned long int)(_zzq_request); \
651 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
652 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
653 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
654 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
655 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
656 _zzq_ptr = _zzq_args; \
657 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
658 "mr 4,%2\n\t" /*ptr*/ \
659 __SPECIAL_INSTRUCTION_PREAMBLE \
660 /* %R3 = client_request ( %R4 ) */ \
662 "mr %0,3" /*result*/ \
663 : "=b" (_zzq_result) \
664 : "b" (_zzq_default), "b" (_zzq_ptr) \
665 : "cc", "memory", "r3", "r4"); \
669 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
670 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
671 unsigned long int __addr; \
672 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
673 /* %R3 = guest_NRADDR */ \
678 : "cc", "memory", "r3" \
680 _zzq_orig->nraddr = __addr; \
681 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
682 /* %R3 = guest_NRADDR_GPR2 */ \
687 : "cc", "memory", "r3" \
689 _zzq_orig->r2 = __addr; \
692 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
693 __SPECIAL_INSTRUCTION_PREAMBLE \
694 /* branch-and-link-to-noredir *%R12 */ \
697 #define VALGRIND_VEX_INJECT_IR() \
699 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
704 #endif /* PLAT_ppc64le_linux */
706 /* ------------------------- arm-linux ------------------------- */
708 #if defined(PLAT_arm_linux)
712 unsigned int nraddr; /* where's the code? */
716 #define __SPECIAL_INSTRUCTION_PREAMBLE \
717 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
718 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
720 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
721 _zzq_default, _zzq_request, \
722 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
725 ({volatile unsigned int _zzq_args[6]; \
726 volatile unsigned int _zzq_result; \
727 _zzq_args[0] = (unsigned int)(_zzq_request); \
728 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
729 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
730 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
731 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
732 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
733 __asm__ volatile("mov r3, %1\n\t" /*default*/ \
734 "mov r4, %2\n\t" /*ptr*/ \
735 __SPECIAL_INSTRUCTION_PREAMBLE \
736 /* R3 = client_request ( R4 ) */ \
737 "orr r10, r10, r10\n\t" \
738 "mov %0, r3" /*result*/ \
739 : "=r" (_zzq_result) \
740 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
741 : "cc","memory", "r3", "r4"); \
745 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
746 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
747 unsigned int __addr; \
748 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
749 /* R3 = guest_NRADDR */ \
750 "orr r11, r11, r11\n\t" \
754 : "cc", "memory", "r3" \
756 _zzq_orig->nraddr = __addr; \
759 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
760 __SPECIAL_INSTRUCTION_PREAMBLE \
761 /* branch-and-link-to-noredir *%R4 */ \
762 "orr r12, r12, r12\n\t"
764 #define VALGRIND_VEX_INJECT_IR() \
766 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
767 "orr r9, r9, r9\n\t" \
768 : : : "cc", "memory" \
772 #endif /* PLAT_arm_linux */
774 /* ------------------------ arm64-{linux,freebsd} ------------------------- */
776 #if defined(PLAT_arm64_linux) || defined(PLAT_arm64_freebsd)
780 unsigned long int nraddr; /* where's the code? */
784 #define __SPECIAL_INSTRUCTION_PREAMBLE \
785 "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \
786 "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"
788 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
789 _zzq_default, _zzq_request, \
790 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
793 ({volatile unsigned long int _zzq_args[6]; \
794 volatile unsigned long int _zzq_result; \
795 _zzq_args[0] = (unsigned long int)(_zzq_request); \
796 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
797 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
798 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
799 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
800 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
801 __asm__ volatile("mov x3, %1\n\t" /*default*/ \
802 "mov x4, %2\n\t" /*ptr*/ \
803 __SPECIAL_INSTRUCTION_PREAMBLE \
804 /* X3 = client_request ( X4 ) */ \
805 "orr x10, x10, x10\n\t" \
806 "mov %0, x3" /*result*/ \
807 : "=r" (_zzq_result) \
808 : "r" ((unsigned long int)(_zzq_default)), \
809 "r" (&_zzq_args[0]) \
810 : "cc","memory", "x3", "x4"); \
814 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
815 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
816 unsigned long int __addr; \
817 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
818 /* X3 = guest_NRADDR */ \
819 "orr x11, x11, x11\n\t" \
823 : "cc", "memory", "x3" \
825 _zzq_orig->nraddr = __addr; \
828 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
829 __SPECIAL_INSTRUCTION_PREAMBLE \
830 /* branch-and-link-to-noredir X8 */ \
831 "orr x12, x12, x12\n\t"
833 #define VALGRIND_VEX_INJECT_IR() \
835 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
836 "orr x9, x9, x9\n\t" \
837 : : : "cc", "memory" \
841 #endif /* PLAT_arm64_linux || PLAT_arm64_freebsd */
843 /* ------------------------ s390x-linux ------------------------ */
845 #if defined(PLAT_s390x_linux)
849 unsigned long int nraddr; /* where's the code? */
853 /* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
854 * code. This detection is implemented in platform specific toIR.c
855 * (e.g. VEX/priv/guest_s390_decoder.c).
857 #define __SPECIAL_INSTRUCTION_PREAMBLE \
863 #define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
864 #define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
865 #define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
866 #define __VEX_INJECT_IR_CODE "lr 5,5\n\t"
868 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
869 _zzq_default, _zzq_request, \
870 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
872 ({volatile unsigned long int _zzq_args[6]; \
873 volatile unsigned long int _zzq_result; \
874 _zzq_args[0] = (unsigned long int)(_zzq_request); \
875 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
876 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
877 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
878 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
879 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
880 __asm__ volatile(/* r2 = args */ \
884 __SPECIAL_INSTRUCTION_PREAMBLE \
885 __CLIENT_REQUEST_CODE \
888 : "=d" (_zzq_result) \
889 : "a" (&_zzq_args[0]), \
890 "0" ((unsigned long int)_zzq_default) \
891 : "cc", "2", "3", "memory" \
896 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
897 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
898 volatile unsigned long int __addr; \
899 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
900 __GET_NR_CONTEXT_CODE \
904 : "cc", "3", "memory" \
906 _zzq_orig->nraddr = __addr; \
909 #define VALGRIND_CALL_NOREDIR_R1 \
910 __SPECIAL_INSTRUCTION_PREAMBLE \
913 #define VALGRIND_VEX_INJECT_IR() \
915 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
916 __VEX_INJECT_IR_CODE); \
919 #endif /* PLAT_s390x_linux */
921 /* ------------------------- mips32-linux ---------------- */
923 #if defined(PLAT_mips32_linux)
927 unsigned int nraddr; /* where's the code? */
935 #define __SPECIAL_INSTRUCTION_PREAMBLE \
936 "srl $0, $0, 13\n\t" \
937 "srl $0, $0, 29\n\t" \
938 "srl $0, $0, 3\n\t" \
941 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
942 _zzq_default, _zzq_request, \
943 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
945 ({ volatile unsigned int _zzq_args[6]; \
946 volatile unsigned int _zzq_result; \
947 _zzq_args[0] = (unsigned int)(_zzq_request); \
948 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
949 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
950 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
951 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
952 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
953 __asm__ volatile("move $11, %1\n\t" /*default*/ \
954 "move $12, %2\n\t" /*ptr*/ \
955 __SPECIAL_INSTRUCTION_PREAMBLE \
956 /* T3 = client_request ( T4 ) */ \
957 "or $13, $13, $13\n\t" \
958 "move %0, $11\n\t" /*result*/ \
959 : "=r" (_zzq_result) \
960 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
961 : "$11", "$12", "memory"); \
965 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
966 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
967 volatile unsigned int __addr; \
968 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
969 /* %t9 = guest_NRADDR */ \
970 "or $14, $14, $14\n\t" \
971 "move %0, $11" /*result*/ \
976 _zzq_orig->nraddr = __addr; \
979 #define VALGRIND_CALL_NOREDIR_T9 \
980 __SPECIAL_INSTRUCTION_PREAMBLE \
981 /* call-noredir *%t9 */ \
982 "or $15, $15, $15\n\t"
984 #define VALGRIND_VEX_INJECT_IR() \
986 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
987 "or $11, $11, $11\n\t" \
992 #endif /* PLAT_mips32_linux */
994 /* ------------------------- mips64-linux ---------------- */
996 #if defined(PLAT_mips64_linux)
1000 unsigned long nraddr; /* where's the code? */
1008 #define __SPECIAL_INSTRUCTION_PREAMBLE \
1009 "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
1010 "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
1012 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
1013 _zzq_default, _zzq_request, \
1014 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
1016 ({ volatile unsigned long int _zzq_args[6]; \
1017 volatile unsigned long int _zzq_result; \
1018 _zzq_args[0] = (unsigned long int)(_zzq_request); \
1019 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
1020 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
1021 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
1022 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
1023 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
1024 __asm__ volatile("move $11, %1\n\t" /*default*/ \
1025 "move $12, %2\n\t" /*ptr*/ \
1026 __SPECIAL_INSTRUCTION_PREAMBLE \
1027 /* $11 = client_request ( $12 ) */ \
1028 "or $13, $13, $13\n\t" \
1029 "move %0, $11\n\t" /*result*/ \
1030 : "=r" (_zzq_result) \
1031 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
1032 : "$11", "$12", "memory"); \
1036 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
1037 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
1038 volatile unsigned long int __addr; \
1039 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1040 /* $11 = guest_NRADDR */ \
1041 "or $14, $14, $14\n\t" \
1042 "move %0, $11" /*result*/ \
1046 _zzq_orig->nraddr = __addr; \
1049 #define VALGRIND_CALL_NOREDIR_T9 \
1050 __SPECIAL_INSTRUCTION_PREAMBLE \
1051 /* call-noredir $25 */ \
1052 "or $15, $15, $15\n\t"
1054 #define VALGRIND_VEX_INJECT_IR() \
1056 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1057 "or $11, $11, $11\n\t" \
1061 #endif /* PLAT_mips64_linux */
1063 #if defined(PLAT_nanomips_linux)
1067 unsigned int nraddr; /* where's the code? */
1071 8000 c04d srl zero, zero, 13
1072 8000 c05d srl zero, zero, 29
1073 8000 c043 srl zero, zero, 3
1074 8000 c053 srl zero, zero, 19
1077 #define __SPECIAL_INSTRUCTION_PREAMBLE "srl[32] $zero, $zero, 13 \n\t" \
1078 "srl[32] $zero, $zero, 29 \n\t" \
1079 "srl[32] $zero, $zero, 3 \n\t" \
1080 "srl[32] $zero, $zero, 19 \n\t"
1082 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
1083 _zzq_default, _zzq_request, \
1084 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
1086 ({ volatile unsigned int _zzq_args[6]; \
1087 volatile unsigned int _zzq_result; \
1088 _zzq_args[0] = (unsigned int)(_zzq_request); \
1089 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
1090 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
1091 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
1092 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
1093 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
1094 __asm__ volatile("move $a7, %1\n\t" /* default */ \
1095 "move $t0, %2\n\t" /* ptr */ \
1096 __SPECIAL_INSTRUCTION_PREAMBLE \
1097 /* $a7 = client_request( $t0 ) */ \
1098 "or[32] $t0, $t0, $t0\n\t" \
1099 "move %0, $a7\n\t" /* result */ \
1100 : "=r" (_zzq_result) \
1101 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
1102 : "$a7", "$t0", "memory"); \
1106 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
1107 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
1108 volatile unsigned long int __addr; \
1109 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1110 /* $a7 = guest_NRADDR */ \
1111 "or[32] $t1, $t1, $t1\n\t" \
1112 "move %0, $a7" /*result*/ \
1116 _zzq_orig->nraddr = __addr; \
1119 #define VALGRIND_CALL_NOREDIR_T9 \
1120 __SPECIAL_INSTRUCTION_PREAMBLE \
1121 /* call-noredir $25 */ \
1122 "or[32] $t2, $t2, $t2\n\t"
1124 #define VALGRIND_VEX_INJECT_IR() \
1126 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1127 "or[32] $t3, $t3, $t3\n\t" \
1132 /* Insert assembly code for other platforms here... */
1134 #endif /* NVALGRIND */
1137 /* ------------------------------------------------------------------ */
1138 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1139 /* ugly. It's the least-worst tradeoff I can think of. */
1140 /* ------------------------------------------------------------------ */
1142 /* This section defines magic (a.k.a. appalling-hack) macros for doing
1143 guaranteed-no-redirection calls, so as to get from function
1144 wrappers to the functions they are wrapping. The whole point is to
1145 construct standard call sequences, but to do the call itself with a
1146 special no-redirect call pseudo-instruction that the JIT
1147 understands and handles specially. This section is long and
1148 repetitious, and I can't see a way to make it shorter.
1150 The naming scheme is as follows:
1152 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1154 'W' stands for "word" and 'v' for "void". Hence there are
1155 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1156 and for each, the possibility of returning a word-typed result, or no result. */
1160 /* Use these to write the name of your wrapper. NOTE: duplicates
1161 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
1162 the default behaviour equivalence class tag "0000" into the name.
1163 See pub_tool_redir.h for details -- normally you don't need to
1164 think about this, though. */
1166 /* Use an extra level of macroisation so as to ensure the soname/fnname
1167 args are fully macro-expanded before pasting them together. */
1168 #define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
1170 #define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
1171 VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)
1173 #define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
1174 VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
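/* For example (a sketch): to wrap a function "foo" exported by any
   shared object whose soname matches "libm.so*", name the wrapper

      I_WRAP_SONAME_FNNAME_ZU(libmZdsoZa, foo)

   The soname is Z-encoded: "Zd" stands for '.' and "Za" for '*'; see
   pub_tool_redir.h for the full escape list.  The _ZU form takes a
   Z-encoded soname and an unencoded function name; use the _ZZ form
   when the function name itself also needs Z-encoding. */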
1176 /* Use this macro from within a wrapper function to collect the
1177 context (address and possibly other info) of the original function.
1178 Once you have that you can then use it in one of the CALL_FN_
1179 macros. The type of the argument _lval is OrigFn. */
1180 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
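/* A sketch of a complete wrapper following the usual pattern (the
   function "foo" and its location in the main executable, for which
   the soname "NONE" is used, are assumptions for illustration):

      #include <stdio.h>
      #include "valgrind.h"

      int I_WRAP_SONAME_FNNAME_ZU(NONE, foo) ( int x, int y )
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);
         printf("wrapper: called with %d %d\n", x, y);
         CALL_FN_W_WW(result, fn, x, y);
         printf("wrapper: original returned %d\n", result);
         return result;
      }

   When running under Valgrind, calls to foo are diverted to this
   wrapper, which uses CALL_FN_W_WW (defined below) to call the real
   foo without being re-diverted. */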
1182 /* Also provide end-user facilities for function replacement, rather
1183 than wrapping. A replacement function differs from a wrapper in
1184 that it has no way to get hold of the original function being
1185 called, and hence no way to call onwards to it. In a replacement
1186 function, VALGRIND_GET_ORIG_FN always returns zero. */
1188 #define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname) \
1189 VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)
1191 #define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname) \
1192 VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
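/* Sketch of a replacement rather than a wrapper (the target "bar" and
   the soname pattern are illustrative only):

      double I_REPLACE_SONAME_FNNAME_ZU(libmZdsoZa, bar) ( double x )
      {
         return 0.0;
      }

   Unlike a wrapper, a replacement has no way to reach the original
   bar: VALGRIND_GET_ORIG_FN yields an OrigFn whose nraddr is zero, so
   there is nothing useful to hand to the CALL_FN_ macros. */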
1194 /* Derivatives of the main macros below, for calling functions returning void. */
1197 #define CALL_FN_v_v(fnptr) \
1198 do { volatile unsigned long _junk; \
1199 CALL_FN_W_v(_junk,fnptr); } while (0)
1201 #define CALL_FN_v_W(fnptr, arg1) \
1202 do { volatile unsigned long _junk; \
1203 CALL_FN_W_W(_junk,fnptr,arg1); } while (0)
1205 #define CALL_FN_v_WW(fnptr, arg1,arg2) \
1206 do { volatile unsigned long _junk; \
1207 CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)
1209 #define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
1210 do { volatile unsigned long _junk; \
1211 CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
1213 #define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
1214 do { volatile unsigned long _junk; \
1215 CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
1217 #define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
1218 do { volatile unsigned long _junk; \
1219 CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
1221 #define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
1222 do { volatile unsigned long _junk; \
1223 CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
1225 #define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
1226 do { volatile unsigned long _junk; \
1227 CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
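/* For example (a sketch): a wrapper for a void-returning function uses
   the same pattern with a CALL_FN_v_* macro.  The wrapped name
   "release" and its location (main executable, soname NONE) are
   assumptions for illustration:

      void I_WRAP_SONAME_FNNAME_ZU(NONE, release) ( void* p )
      {
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);
         CALL_FN_v_W(fn, p);
      }
*/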
1229 /* ----------------- x86-{linux,darwin,solaris} ---------------- */
1231 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
1232 || defined(PLAT_x86_solaris) || defined(PLAT_x86_freebsd)
1234 /* These regs are trashed by the hidden call. No need to mention eax
1235 as gcc can already see that, and mentioning it causes gcc to bomb. */
1236 #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
1238 /* Macros to save and align the stack before making a function
1239 call and restore it afterwards as gcc may not keep the stack
1240 pointer aligned if it doesn't realise calls are being made
1241 to other functions. */
1243 #define VALGRIND_ALIGN_STACK \
1244 "movl %%esp,%%edi\n\t" \
1245 "andl $0xfffffff0,%%esp\n\t"
1246 #define VALGRIND_RESTORE_STACK \
1247 "movl %%edi,%%esp\n\t"
1249 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned long) == 4. */
1252 #define CALL_FN_W_v(lval, orig) \
1254 volatile OrigFn _orig = (orig); \
1255 volatile unsigned long _argvec[1]; \
1256 volatile unsigned long _res; \
1257 _argvec[0] = (unsigned long)_orig.nraddr; \
1259 VALGRIND_ALIGN_STACK \
1260 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1261 VALGRIND_CALL_NOREDIR_EAX \
1262 VALGRIND_RESTORE_STACK \
1263 : /*out*/ "=a" (_res) \
1264 : /*in*/ "a" (&_argvec[0]) \
1265 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1267 lval = (__typeof__(lval)) _res; \
1270 #define CALL_FN_W_W(lval, orig, arg1) \
1272 volatile OrigFn _orig = (orig); \
1273 volatile unsigned long _argvec[2]; \
1274 volatile unsigned long _res; \
1275 _argvec[0] = (unsigned long)_orig.nraddr; \
1276 _argvec[1] = (unsigned long)(arg1); \
1278 VALGRIND_ALIGN_STACK \
1279 "subl $12, %%esp\n\t" \
1280 "pushl 4(%%eax)\n\t" \
1281 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1282 VALGRIND_CALL_NOREDIR_EAX \
1283 VALGRIND_RESTORE_STACK \
1284 : /*out*/ "=a" (_res) \
1285 : /*in*/ "a" (&_argvec[0]) \
1286 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1288 lval = (__typeof__(lval)) _res; \
1291 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1293 volatile OrigFn _orig = (orig); \
1294 volatile unsigned long _argvec[3]; \
1295 volatile unsigned long _res; \
1296 _argvec[0] = (unsigned long)_orig.nraddr; \
1297 _argvec[1] = (unsigned long)(arg1); \
1298 _argvec[2] = (unsigned long)(arg2); \
1300 VALGRIND_ALIGN_STACK \
1301 "subl $8, %%esp\n\t" \
1302 "pushl 8(%%eax)\n\t" \
1303 "pushl 4(%%eax)\n\t" \
1304 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1305 VALGRIND_CALL_NOREDIR_EAX \
1306 VALGRIND_RESTORE_STACK \
1307 : /*out*/ "=a" (_res) \
1308 : /*in*/ "a" (&_argvec[0]) \
1309 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1311 lval = (__typeof__(lval)) _res; \
1314 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1316 volatile OrigFn _orig = (orig); \
1317 volatile unsigned long _argvec[4]; \
1318 volatile unsigned long _res; \
1319 _argvec[0] = (unsigned long)_orig.nraddr; \
1320 _argvec[1] = (unsigned long)(arg1); \
1321 _argvec[2] = (unsigned long)(arg2); \
1322 _argvec[3] = (unsigned long)(arg3); \
1324 VALGRIND_ALIGN_STACK \
1325 "subl $4, %%esp\n\t" \
1326 "pushl 12(%%eax)\n\t" \
1327 "pushl 8(%%eax)\n\t" \
1328 "pushl 4(%%eax)\n\t" \
1329 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1330 VALGRIND_CALL_NOREDIR_EAX \
1331 VALGRIND_RESTORE_STACK \
1332 : /*out*/ "=a" (_res) \
1333 : /*in*/ "a" (&_argvec[0]) \
1334 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1336 lval = (__typeof__(lval)) _res; \
1339 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1341 volatile OrigFn _orig = (orig); \
1342 volatile unsigned long _argvec[5]; \
1343 volatile unsigned long _res; \
1344 _argvec[0] = (unsigned long)_orig.nraddr; \
1345 _argvec[1] = (unsigned long)(arg1); \
1346 _argvec[2] = (unsigned long)(arg2); \
1347 _argvec[3] = (unsigned long)(arg3); \
1348 _argvec[4] = (unsigned long)(arg4); \
1350 VALGRIND_ALIGN_STACK \
1351 "pushl 16(%%eax)\n\t" \
1352 "pushl 12(%%eax)\n\t" \
1353 "pushl 8(%%eax)\n\t" \
1354 "pushl 4(%%eax)\n\t" \
1355 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1356 VALGRIND_CALL_NOREDIR_EAX \
1357 VALGRIND_RESTORE_STACK \
1358 : /*out*/ "=a" (_res) \
1359 : /*in*/ "a" (&_argvec[0]) \
1360 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1362 lval = (__typeof__(lval)) _res; \
1365 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1367 volatile OrigFn _orig = (orig); \
1368 volatile unsigned long _argvec[6]; \
1369 volatile unsigned long _res; \
1370 _argvec[0] = (unsigned long)_orig.nraddr; \
1371 _argvec[1] = (unsigned long)(arg1); \
1372 _argvec[2] = (unsigned long)(arg2); \
1373 _argvec[3] = (unsigned long)(arg3); \
1374 _argvec[4] = (unsigned long)(arg4); \
1375 _argvec[5] = (unsigned long)(arg5); \
1377 VALGRIND_ALIGN_STACK \
1378 "subl $12, %%esp\n\t" \
1379 "pushl 20(%%eax)\n\t" \
1380 "pushl 16(%%eax)\n\t" \
1381 "pushl 12(%%eax)\n\t" \
1382 "pushl 8(%%eax)\n\t" \
1383 "pushl 4(%%eax)\n\t" \
1384 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1385 VALGRIND_CALL_NOREDIR_EAX \
1386 VALGRIND_RESTORE_STACK \
1387 : /*out*/ "=a" (_res) \
1388 : /*in*/ "a" (&_argvec[0]) \
1389 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1391 lval = (__typeof__(lval)) _res; \
1394 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1396 volatile OrigFn _orig = (orig); \
1397 volatile unsigned long _argvec[7]; \
1398 volatile unsigned long _res; \
1399 _argvec[0] = (unsigned long)_orig.nraddr; \
1400 _argvec[1] = (unsigned long)(arg1); \
1401 _argvec[2] = (unsigned long)(arg2); \
1402 _argvec[3] = (unsigned long)(arg3); \
1403 _argvec[4] = (unsigned long)(arg4); \
1404 _argvec[5] = (unsigned long)(arg5); \
1405 _argvec[6] = (unsigned long)(arg6); \
1407 VALGRIND_ALIGN_STACK \
1408 "subl $8, %%esp\n\t" \
1409 "pushl 24(%%eax)\n\t" \
1410 "pushl 20(%%eax)\n\t" \
1411 "pushl 16(%%eax)\n\t" \
1412 "pushl 12(%%eax)\n\t" \
1413 "pushl 8(%%eax)\n\t" \
1414 "pushl 4(%%eax)\n\t" \
1415 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1416 VALGRIND_CALL_NOREDIR_EAX \
1417 VALGRIND_RESTORE_STACK \
1418 : /*out*/ "=a" (_res) \
1419 : /*in*/ "a" (&_argvec[0]) \
1420 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1422 lval = (__typeof__(lval)) _res; \
1425 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1428 volatile OrigFn _orig = (orig); \
1429 volatile unsigned long _argvec[8]; \
1430 volatile unsigned long _res; \
1431 _argvec[0] = (unsigned long)_orig.nraddr; \
1432 _argvec[1] = (unsigned long)(arg1); \
1433 _argvec[2] = (unsigned long)(arg2); \
1434 _argvec[3] = (unsigned long)(arg3); \
1435 _argvec[4] = (unsigned long)(arg4); \
1436 _argvec[5] = (unsigned long)(arg5); \
1437 _argvec[6] = (unsigned long)(arg6); \
1438 _argvec[7] = (unsigned long)(arg7); \
1440 VALGRIND_ALIGN_STACK \
1441 "subl $4, %%esp\n\t" \
1442 "pushl 28(%%eax)\n\t" \
1443 "pushl 24(%%eax)\n\t" \
1444 "pushl 20(%%eax)\n\t" \
1445 "pushl 16(%%eax)\n\t" \
1446 "pushl 12(%%eax)\n\t" \
1447 "pushl 8(%%eax)\n\t" \
1448 "pushl 4(%%eax)\n\t" \
1449 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1450 VALGRIND_CALL_NOREDIR_EAX \
1451 VALGRIND_RESTORE_STACK \
1452 : /*out*/ "=a" (_res) \
1453 : /*in*/ "a" (&_argvec[0]) \
1454 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1456 lval = (__typeof__(lval)) _res; \
1459 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1462 volatile OrigFn _orig = (orig); \
1463 volatile unsigned long _argvec[9]; \
1464 volatile unsigned long _res; \
1465 _argvec[0] = (unsigned long)_orig.nraddr; \
1466 _argvec[1] = (unsigned long)(arg1); \
1467 _argvec[2] = (unsigned long)(arg2); \
1468 _argvec[3] = (unsigned long)(arg3); \
1469 _argvec[4] = (unsigned long)(arg4); \
1470 _argvec[5] = (unsigned long)(arg5); \
1471 _argvec[6] = (unsigned long)(arg6); \
1472 _argvec[7] = (unsigned long)(arg7); \
1473 _argvec[8] = (unsigned long)(arg8); \
1475 VALGRIND_ALIGN_STACK \
1476 "pushl 32(%%eax)\n\t" \
1477 "pushl 28(%%eax)\n\t" \
1478 "pushl 24(%%eax)\n\t" \
1479 "pushl 20(%%eax)\n\t" \
1480 "pushl 16(%%eax)\n\t" \
1481 "pushl 12(%%eax)\n\t" \
1482 "pushl 8(%%eax)\n\t" \
1483 "pushl 4(%%eax)\n\t" \
1484 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1485 VALGRIND_CALL_NOREDIR_EAX \
1486 VALGRIND_RESTORE_STACK \
1487 : /*out*/ "=a" (_res) \
1488 : /*in*/ "a" (&_argvec[0]) \
1489 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1491 lval = (__typeof__(lval)) _res; \
1494 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1497 volatile OrigFn _orig = (orig); \
1498 volatile unsigned long _argvec[10]; \
1499 volatile unsigned long _res; \
1500 _argvec[0] = (unsigned long)_orig.nraddr; \
1501 _argvec[1] = (unsigned long)(arg1); \
1502 _argvec[2] = (unsigned long)(arg2); \
1503 _argvec[3] = (unsigned long)(arg3); \
1504 _argvec[4] = (unsigned long)(arg4); \
1505 _argvec[5] = (unsigned long)(arg5); \
1506 _argvec[6] = (unsigned long)(arg6); \
1507 _argvec[7] = (unsigned long)(arg7); \
1508 _argvec[8] = (unsigned long)(arg8); \
1509 _argvec[9] = (unsigned long)(arg9); \
1511 VALGRIND_ALIGN_STACK \
1512 "subl $12, %%esp\n\t" \
1513 "pushl 36(%%eax)\n\t" \
1514 "pushl 32(%%eax)\n\t" \
1515 "pushl 28(%%eax)\n\t" \
1516 "pushl 24(%%eax)\n\t" \
1517 "pushl 20(%%eax)\n\t" \
1518 "pushl 16(%%eax)\n\t" \
1519 "pushl 12(%%eax)\n\t" \
1520 "pushl 8(%%eax)\n\t" \
1521 "pushl 4(%%eax)\n\t" \
1522 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1523 VALGRIND_CALL_NOREDIR_EAX \
1524 VALGRIND_RESTORE_STACK \
1525 : /*out*/ "=a" (_res) \
1526 : /*in*/ "a" (&_argvec[0]) \
1527 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1529 lval = (__typeof__(lval)) _res; \
1532 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1533 arg7,arg8,arg9,arg10) \
1535 volatile OrigFn _orig = (orig); \
1536 volatile unsigned long _argvec[11]; \
1537 volatile unsigned long _res; \
1538 _argvec[0] = (unsigned long)_orig.nraddr; \
1539 _argvec[1] = (unsigned long)(arg1); \
1540 _argvec[2] = (unsigned long)(arg2); \
1541 _argvec[3] = (unsigned long)(arg3); \
1542 _argvec[4] = (unsigned long)(arg4); \
1543 _argvec[5] = (unsigned long)(arg5); \
1544 _argvec[6] = (unsigned long)(arg6); \
1545 _argvec[7] = (unsigned long)(arg7); \
1546 _argvec[8] = (unsigned long)(arg8); \
1547 _argvec[9] = (unsigned long)(arg9); \
1548 _argvec[10] = (unsigned long)(arg10); \
1550 VALGRIND_ALIGN_STACK \
1551 "subl $8, %%esp\n\t" \
1552 "pushl 40(%%eax)\n\t" \
1553 "pushl 36(%%eax)\n\t" \
1554 "pushl 32(%%eax)\n\t" \
1555 "pushl 28(%%eax)\n\t" \
1556 "pushl 24(%%eax)\n\t" \
1557 "pushl 20(%%eax)\n\t" \
1558 "pushl 16(%%eax)\n\t" \
1559 "pushl 12(%%eax)\n\t" \
1560 "pushl 8(%%eax)\n\t" \
1561 "pushl 4(%%eax)\n\t" \
1562 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1563 VALGRIND_CALL_NOREDIR_EAX \
1564 VALGRIND_RESTORE_STACK \
1565 : /*out*/ "=a" (_res) \
1566 : /*in*/ "a" (&_argvec[0]) \
1567 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1569 lval = (__typeof__(lval)) _res; \
1572 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1573 arg6,arg7,arg8,arg9,arg10, \
1576 volatile OrigFn _orig = (orig); \
1577 volatile unsigned long _argvec[12]; \
1578 volatile unsigned long _res; \
1579 _argvec[0] = (unsigned long)_orig.nraddr; \
1580 _argvec[1] = (unsigned long)(arg1); \
1581 _argvec[2] = (unsigned long)(arg2); \
1582 _argvec[3] = (unsigned long)(arg3); \
1583 _argvec[4] = (unsigned long)(arg4); \
1584 _argvec[5] = (unsigned long)(arg5); \
1585 _argvec[6] = (unsigned long)(arg6); \
1586 _argvec[7] = (unsigned long)(arg7); \
1587 _argvec[8] = (unsigned long)(arg8); \
1588 _argvec[9] = (unsigned long)(arg9); \
1589 _argvec[10] = (unsigned long)(arg10); \
1590 _argvec[11] = (unsigned long)(arg11); \
1592 VALGRIND_ALIGN_STACK \
1593 "subl $4, %%esp\n\t" \
1594 "pushl 44(%%eax)\n\t" \
1595 "pushl 40(%%eax)\n\t" \
1596 "pushl 36(%%eax)\n\t" \
1597 "pushl 32(%%eax)\n\t" \
1598 "pushl 28(%%eax)\n\t" \
1599 "pushl 24(%%eax)\n\t" \
1600 "pushl 20(%%eax)\n\t" \
1601 "pushl 16(%%eax)\n\t" \
1602 "pushl 12(%%eax)\n\t" \
1603 "pushl 8(%%eax)\n\t" \
1604 "pushl 4(%%eax)\n\t" \
1605 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1606 VALGRIND_CALL_NOREDIR_EAX \
1607 VALGRIND_RESTORE_STACK \
1608 : /*out*/ "=a" (_res) \
1609 : /*in*/ "a" (&_argvec[0]) \
1610 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1612 lval = (__typeof__(lval)) _res; \
1615 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1616 arg6,arg7,arg8,arg9,arg10, \
1619 volatile OrigFn _orig = (orig); \
1620 volatile unsigned long _argvec[13]; \
1621 volatile unsigned long _res; \
1622 _argvec[0] = (unsigned long)_orig.nraddr; \
1623 _argvec[1] = (unsigned long)(arg1); \
1624 _argvec[2] = (unsigned long)(arg2); \
1625 _argvec[3] = (unsigned long)(arg3); \
1626 _argvec[4] = (unsigned long)(arg4); \
1627 _argvec[5] = (unsigned long)(arg5); \
1628 _argvec[6] = (unsigned long)(arg6); \
1629 _argvec[7] = (unsigned long)(arg7); \
1630 _argvec[8] = (unsigned long)(arg8); \
1631 _argvec[9] = (unsigned long)(arg9); \
1632 _argvec[10] = (unsigned long)(arg10); \
1633 _argvec[11] = (unsigned long)(arg11); \
1634 _argvec[12] = (unsigned long)(arg12); \
1636 VALGRIND_ALIGN_STACK \
1637 "pushl 48(%%eax)\n\t" \
1638 "pushl 44(%%eax)\n\t" \
1639 "pushl 40(%%eax)\n\t" \
1640 "pushl 36(%%eax)\n\t" \
1641 "pushl 32(%%eax)\n\t" \
1642 "pushl 28(%%eax)\n\t" \
1643 "pushl 24(%%eax)\n\t" \
1644 "pushl 20(%%eax)\n\t" \
1645 "pushl 16(%%eax)\n\t" \
1646 "pushl 12(%%eax)\n\t" \
1647 "pushl 8(%%eax)\n\t" \
1648 "pushl 4(%%eax)\n\t" \
1649 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1650 VALGRIND_CALL_NOREDIR_EAX \
1651 VALGRIND_RESTORE_STACK \
1652 : /*out*/ "=a" (_res) \
1653 : /*in*/ "a" (&_argvec[0]) \
1654 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1656 lval = (__typeof__(lval)) _res; \
1659 #endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris || PLAT_x86_freebsd */
1661 /* ---------------- amd64-{linux,darwin,solaris} --------------- */
1663 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1664 || defined(PLAT_amd64_solaris) || defined(PLAT_amd64_freebsd)
1666 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1668 /* These regs are trashed by the hidden call. */
1669 #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1670 "rdi", "r8", "r9", "r10", "r11"
1672 /* This is all pretty complex. It's so as to make stack unwinding
1673 work reliably. See bug 243270. The basic problem is the sub and
1674 add of 128 of %rsp in all of the following macros. If gcc believes
1675 the CFA is in %rsp, then unwinding may fail, because what's at the
1676 CFA is not what gcc "expected" when it constructs the CFIs for the
1677 places where the macros are instantiated.
1679 But we can't just add a CFI annotation to increase the CFA offset
1680 by 128, to match the sub of 128 from %rsp, because we don't know
1681 whether gcc has chosen %rsp as the CFA at that point, or whether it
1682 has chosen some other register (eg, %rbp). In the latter case,
1683 adding a CFI annotation to change the CFA offset is simply wrong.
1685 So the solution is to get hold of the CFA using
1686 __builtin_dwarf_cfa(), put it in a known register, and add a
1687 CFI annotation to say what the register is. We choose %rbp for
1688 this (perhaps perversely), because:
1690 (1) %rbp is already subject to unwinding. If a new register was
1691 chosen then the unwinder would have to unwind it in all stack
1692 traces, which is expensive, and
1694 (2) %rbp is already subject to precise exception updates in the
1695 JIT. If a new register was chosen, we'd have to have precise
1696 exceptions for it too, which reduces performance of the generated code.
1699 However .. one extra complication. We can't just whack the result
1700 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1701 list of trashed registers at the end of the inline assembly
1702 fragments; gcc won't allow %rbp to appear in that list. Hence
1703 instead we need to stash %rbp in %r15 for the duration of the asm,
1704 and say that %r15 is trashed instead. gcc seems happy to go with that.
1707 Oh .. and this all needs to be conditionalised so that it is
1708 unchanged from before this commit, when compiled with older gccs
1709 that don't support __builtin_dwarf_cfa. Furthermore, since
1710 this header file is freestanding, it has to be independent of
1711 config.h, and so the following conditionalisation cannot depend on
1712 configure time checks.
1714 Although it's not clear from
1715 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1716 this expression excludes Darwin.
1717 .cfi directives in Darwin assembly appear to be completely
1718 different and I haven't investigated how they work.
1720 For even more entertainment value, note we have to use the
1721 completely undocumented __builtin_dwarf_cfa(), which appears to
1722 really compute the CFA, whereas __builtin_frame_address(0) claims
1723 to but actually doesn't. See
1724 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1726 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
1727 # define __FRAME_POINTER \
1728 ,"r"(__builtin_dwarf_cfa())
1729 # define VALGRIND_CFI_PROLOGUE \
1730 "movq %%rbp, %%r15\n\t" \
1731 "movq %2, %%rbp\n\t" \
1732 ".cfi_remember_state\n\t" \
1733 ".cfi_def_cfa rbp, 0\n\t"
1734 # define VALGRIND_CFI_EPILOGUE \
1735 "movq %%r15, %%rbp\n\t" \
1736 ".cfi_restore_state\n\t"
1738 # define __FRAME_POINTER
1739 # define VALGRIND_CFI_PROLOGUE
1740 # define VALGRIND_CFI_EPILOGUE
1743 /* Macros to save and align the stack before making a function
1744 call and restore it afterwards as gcc may not keep the stack
1745 pointer aligned if it doesn't realise calls are being made
1746 to other functions. */
1748 #define VALGRIND_ALIGN_STACK \
1749 "movq %%rsp,%%r14\n\t" \
1750 "andq $0xfffffffffffffff0,%%rsp\n\t"
1751 #define VALGRIND_RESTORE_STACK \
1752 "movq %%r14,%%rsp\n\t"
1754 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned long) == 8. */
1757 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1758 macros. In order not to trash the stack redzone, we need to drop
1759 %rsp by 128 before the hidden call, and restore afterwards. The
1760 nastiness is that it is only by luck that the stack still appears
1761 to be unwindable during the hidden call - since then the behaviour
1762 of any routine using this macro does not match what the CFI data says.
1765 Why is this important? Imagine that a wrapper has a stack
1766 allocated local, and passes to the hidden call, a pointer to it.
1767 Because gcc does not know about the hidden call, it may allocate
1768 that local in the redzone. Unfortunately the hidden call may then
1769 trash it before it comes to use it. So we must step clear of the
1770 redzone, for the duration of the hidden call, to make it safe.
1772 Probably the same problem afflicts the other redzone-style ABIs too
1773 (ppc64-linux); but for those, the stack is
1774 self-describing (none of this CFI nonsense) so at least messing
1775 with the stack pointer doesn't give a danger of non-unwindable stacks. */
1778 #define CALL_FN_W_v(lval, orig) \
1780 volatile OrigFn _orig = (orig); \
1781 volatile unsigned long _argvec[1]; \
1782 volatile unsigned long _res; \
1783 _argvec[0] = (unsigned long)_orig.nraddr; \
1785 VALGRIND_CFI_PROLOGUE \
1786 VALGRIND_ALIGN_STACK \
1787 "subq $128,%%rsp\n\t" \
1788 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1789 VALGRIND_CALL_NOREDIR_RAX \
1790 VALGRIND_RESTORE_STACK \
1791 VALGRIND_CFI_EPILOGUE \
1792 : /*out*/ "=a" (_res) \
1793 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1794 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1796 lval = (__typeof__(lval)) _res; \
1799 #define CALL_FN_W_W(lval, orig, arg1) \
1801 volatile OrigFn _orig = (orig); \
1802 volatile unsigned long _argvec[2]; \
1803 volatile unsigned long _res; \
1804 _argvec[0] = (unsigned long)_orig.nraddr; \
1805 _argvec[1] = (unsigned long)(arg1); \
1807 VALGRIND_CFI_PROLOGUE \
1808 VALGRIND_ALIGN_STACK \
1809 "subq $128,%%rsp\n\t" \
1810 "movq 8(%%rax), %%rdi\n\t" \
1811 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1812 VALGRIND_CALL_NOREDIR_RAX \
1813 VALGRIND_RESTORE_STACK \
1814 VALGRIND_CFI_EPILOGUE \
1815 : /*out*/ "=a" (_res) \
1816 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1817 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1819 lval = (__typeof__(lval)) _res; \
1822 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1824 volatile OrigFn _orig = (orig); \
1825 volatile unsigned long _argvec[3]; \
1826 volatile unsigned long _res; \
1827 _argvec[0] = (unsigned long)_orig.nraddr; \
1828 _argvec[1] = (unsigned long)(arg1); \
1829 _argvec[2] = (unsigned long)(arg2); \
1831 VALGRIND_CFI_PROLOGUE \
1832 VALGRIND_ALIGN_STACK \
1833 "subq $128,%%rsp\n\t" \
1834 "movq 16(%%rax), %%rsi\n\t" \
1835 "movq 8(%%rax), %%rdi\n\t" \
1836 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1837 VALGRIND_CALL_NOREDIR_RAX \
1838 VALGRIND_RESTORE_STACK \
1839 VALGRIND_CFI_EPILOGUE \
1840 : /*out*/ "=a" (_res) \
1841 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1842 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1844 lval = (__typeof__(lval)) _res; \
1847 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1849 volatile OrigFn _orig = (orig); \
1850 volatile unsigned long _argvec[4]; \
1851 volatile unsigned long _res; \
1852 _argvec[0] = (unsigned long)_orig.nraddr; \
1853 _argvec[1] = (unsigned long)(arg1); \
1854 _argvec[2] = (unsigned long)(arg2); \
1855 _argvec[3] = (unsigned long)(arg3); \
1857 VALGRIND_CFI_PROLOGUE \
1858 VALGRIND_ALIGN_STACK \
1859 "subq $128,%%rsp\n\t" \
1860 "movq 24(%%rax), %%rdx\n\t" \
1861 "movq 16(%%rax), %%rsi\n\t" \
1862 "movq 8(%%rax), %%rdi\n\t" \
1863 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1864 VALGRIND_CALL_NOREDIR_RAX \
1865 VALGRIND_RESTORE_STACK \
1866 VALGRIND_CFI_EPILOGUE \
1867 : /*out*/ "=a" (_res) \
1868 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1869 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1871 lval = (__typeof__(lval)) _res; \
1874 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1876 volatile OrigFn _orig = (orig); \
1877 volatile unsigned long _argvec[5]; \
1878 volatile unsigned long _res; \
1879 _argvec[0] = (unsigned long)_orig.nraddr; \
1880 _argvec[1] = (unsigned long)(arg1); \
1881 _argvec[2] = (unsigned long)(arg2); \
1882 _argvec[3] = (unsigned long)(arg3); \
1883 _argvec[4] = (unsigned long)(arg4); \
1885 VALGRIND_CFI_PROLOGUE \
1886 VALGRIND_ALIGN_STACK \
1887 "subq $128,%%rsp\n\t" \
1888 "movq 32(%%rax), %%rcx\n\t" \
1889 "movq 24(%%rax), %%rdx\n\t" \
1890 "movq 16(%%rax), %%rsi\n\t" \
1891 "movq 8(%%rax), %%rdi\n\t" \
1892 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1893 VALGRIND_CALL_NOREDIR_RAX \
1894 VALGRIND_RESTORE_STACK \
1895 VALGRIND_CFI_EPILOGUE \
1896 : /*out*/ "=a" (_res) \
1897 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1898 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1900 lval = (__typeof__(lval)) _res; \
1903 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1905 volatile OrigFn _orig = (orig); \
1906 volatile unsigned long _argvec[6]; \
1907 volatile unsigned long _res; \
1908 _argvec[0] = (unsigned long)_orig.nraddr; \
1909 _argvec[1] = (unsigned long)(arg1); \
1910 _argvec[2] = (unsigned long)(arg2); \
1911 _argvec[3] = (unsigned long)(arg3); \
1912 _argvec[4] = (unsigned long)(arg4); \
1913 _argvec[5] = (unsigned long)(arg5); \
1915 VALGRIND_CFI_PROLOGUE \
1916 VALGRIND_ALIGN_STACK \
1917 "subq $128,%%rsp\n\t" \
1918 "movq 40(%%rax), %%r8\n\t" \
1919 "movq 32(%%rax), %%rcx\n\t" \
1920 "movq 24(%%rax), %%rdx\n\t" \
1921 "movq 16(%%rax), %%rsi\n\t" \
1922 "movq 8(%%rax), %%rdi\n\t" \
1923 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1924 VALGRIND_CALL_NOREDIR_RAX \
1925 VALGRIND_RESTORE_STACK \
1926 VALGRIND_CFI_EPILOGUE \
1927 : /*out*/ "=a" (_res) \
1928 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1929 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1931 lval = (__typeof__(lval)) _res; \
1934 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1936 volatile OrigFn _orig = (orig); \
1937 volatile unsigned long _argvec[7]; \
1938 volatile unsigned long _res; \
1939 _argvec[0] = (unsigned long)_orig.nraddr; \
1940 _argvec[1] = (unsigned long)(arg1); \
1941 _argvec[2] = (unsigned long)(arg2); \
1942 _argvec[3] = (unsigned long)(arg3); \
1943 _argvec[4] = (unsigned long)(arg4); \
1944 _argvec[5] = (unsigned long)(arg5); \
1945 _argvec[6] = (unsigned long)(arg6); \
1947 VALGRIND_CFI_PROLOGUE \
1948 VALGRIND_ALIGN_STACK \
1949 "subq $128,%%rsp\n\t" \
1950 "movq 48(%%rax), %%r9\n\t" \
1951 "movq 40(%%rax), %%r8\n\t" \
1952 "movq 32(%%rax), %%rcx\n\t" \
1953 "movq 24(%%rax), %%rdx\n\t" \
1954 "movq 16(%%rax), %%rsi\n\t" \
1955 "movq 8(%%rax), %%rdi\n\t" \
1956 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1957 VALGRIND_CALL_NOREDIR_RAX \
1958 VALGRIND_RESTORE_STACK \
1959 VALGRIND_CFI_EPILOGUE \
1960 : /*out*/ "=a" (_res) \
1961 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1962 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1964 lval = (__typeof__(lval)) _res; \
1967 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1970 volatile OrigFn _orig = (orig); \
1971 volatile unsigned long _argvec[8]; \
1972 volatile unsigned long _res; \
1973 _argvec[0] = (unsigned long)_orig.nraddr; \
1974 _argvec[1] = (unsigned long)(arg1); \
1975 _argvec[2] = (unsigned long)(arg2); \
1976 _argvec[3] = (unsigned long)(arg3); \
1977 _argvec[4] = (unsigned long)(arg4); \
1978 _argvec[5] = (unsigned long)(arg5); \
1979 _argvec[6] = (unsigned long)(arg6); \
1980 _argvec[7] = (unsigned long)(arg7); \
1982 VALGRIND_CFI_PROLOGUE \
1983 VALGRIND_ALIGN_STACK \
1984 "subq $136,%%rsp\n\t" \
1985 "pushq 56(%%rax)\n\t" \
1986 "movq 48(%%rax), %%r9\n\t" \
1987 "movq 40(%%rax), %%r8\n\t" \
1988 "movq 32(%%rax), %%rcx\n\t" \
1989 "movq 24(%%rax), %%rdx\n\t" \
1990 "movq 16(%%rax), %%rsi\n\t" \
1991 "movq 8(%%rax), %%rdi\n\t" \
1992 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1993 VALGRIND_CALL_NOREDIR_RAX \
1994 VALGRIND_RESTORE_STACK \
1995 VALGRIND_CFI_EPILOGUE \
1996 : /*out*/ "=a" (_res) \
1997 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1998 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2000 lval = (__typeof__(lval)) _res; \
2003 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2006 volatile OrigFn _orig = (orig); \
2007 volatile unsigned long _argvec[9]; \
2008 volatile unsigned long _res; \
2009 _argvec[0] = (unsigned long)_orig.nraddr; \
2010 _argvec[1] = (unsigned long)(arg1); \
2011 _argvec[2] = (unsigned long)(arg2); \
2012 _argvec[3] = (unsigned long)(arg3); \
2013 _argvec[4] = (unsigned long)(arg4); \
2014 _argvec[5] = (unsigned long)(arg5); \
2015 _argvec[6] = (unsigned long)(arg6); \
2016 _argvec[7] = (unsigned long)(arg7); \
2017 _argvec[8] = (unsigned long)(arg8); \
2019 VALGRIND_CFI_PROLOGUE \
2020 VALGRIND_ALIGN_STACK \
2021 "subq $128,%%rsp\n\t" \
2022 "pushq 64(%%rax)\n\t" \
2023 "pushq 56(%%rax)\n\t" \
2024 "movq 48(%%rax), %%r9\n\t" \
2025 "movq 40(%%rax), %%r8\n\t" \
2026 "movq 32(%%rax), %%rcx\n\t" \
2027 "movq 24(%%rax), %%rdx\n\t" \
2028 "movq 16(%%rax), %%rsi\n\t" \
2029 "movq 8(%%rax), %%rdi\n\t" \
2030 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2031 VALGRIND_CALL_NOREDIR_RAX \
2032 VALGRIND_RESTORE_STACK \
2033 VALGRIND_CFI_EPILOGUE \
2034 : /*out*/ "=a" (_res) \
2035 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2036 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2038 lval = (__typeof__(lval)) _res; \
2041 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2044 volatile OrigFn _orig = (orig); \
2045 volatile unsigned long _argvec[10]; \
2046 volatile unsigned long _res; \
2047 _argvec[0] = (unsigned long)_orig.nraddr; \
2048 _argvec[1] = (unsigned long)(arg1); \
2049 _argvec[2] = (unsigned long)(arg2); \
2050 _argvec[3] = (unsigned long)(arg3); \
2051 _argvec[4] = (unsigned long)(arg4); \
2052 _argvec[5] = (unsigned long)(arg5); \
2053 _argvec[6] = (unsigned long)(arg6); \
2054 _argvec[7] = (unsigned long)(arg7); \
2055 _argvec[8] = (unsigned long)(arg8); \
2056 _argvec[9] = (unsigned long)(arg9); \
2058 VALGRIND_CFI_PROLOGUE \
2059 VALGRIND_ALIGN_STACK \
2060 "subq $136,%%rsp\n\t" \
2061 "pushq 72(%%rax)\n\t" \
2062 "pushq 64(%%rax)\n\t" \
2063 "pushq 56(%%rax)\n\t" \
2064 "movq 48(%%rax), %%r9\n\t" \
2065 "movq 40(%%rax), %%r8\n\t" \
2066 "movq 32(%%rax), %%rcx\n\t" \
2067 "movq 24(%%rax), %%rdx\n\t" \
2068 "movq 16(%%rax), %%rsi\n\t" \
2069 "movq 8(%%rax), %%rdi\n\t" \
2070 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2071 VALGRIND_CALL_NOREDIR_RAX \
2072 VALGRIND_RESTORE_STACK \
2073 VALGRIND_CFI_EPILOGUE \
2074 : /*out*/ "=a" (_res) \
2075 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2076 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2078 lval = (__typeof__(lval)) _res; \
2081 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2082 arg7,arg8,arg9,arg10) \
2084 volatile OrigFn _orig = (orig); \
2085 volatile unsigned long _argvec[11]; \
2086 volatile unsigned long _res; \
2087 _argvec[0] = (unsigned long)_orig.nraddr; \
2088 _argvec[1] = (unsigned long)(arg1); \
2089 _argvec[2] = (unsigned long)(arg2); \
2090 _argvec[3] = (unsigned long)(arg3); \
2091 _argvec[4] = (unsigned long)(arg4); \
2092 _argvec[5] = (unsigned long)(arg5); \
2093 _argvec[6] = (unsigned long)(arg6); \
2094 _argvec[7] = (unsigned long)(arg7); \
2095 _argvec[8] = (unsigned long)(arg8); \
2096 _argvec[9] = (unsigned long)(arg9); \
2097 _argvec[10] = (unsigned long)(arg10); \
2099 VALGRIND_CFI_PROLOGUE \
2100 VALGRIND_ALIGN_STACK \
2101 "subq $128,%%rsp\n\t" \
2102 "pushq 80(%%rax)\n\t" \
2103 "pushq 72(%%rax)\n\t" \
2104 "pushq 64(%%rax)\n\t" \
2105 "pushq 56(%%rax)\n\t" \
2106 "movq 48(%%rax), %%r9\n\t" \
2107 "movq 40(%%rax), %%r8\n\t" \
2108 "movq 32(%%rax), %%rcx\n\t" \
2109 "movq 24(%%rax), %%rdx\n\t" \
2110 "movq 16(%%rax), %%rsi\n\t" \
2111 "movq 8(%%rax), %%rdi\n\t" \
2112 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2113 VALGRIND_CALL_NOREDIR_RAX \
2114 VALGRIND_RESTORE_STACK \
2115 VALGRIND_CFI_EPILOGUE \
2116 : /*out*/ "=a" (_res) \
2117 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2118 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2120 lval = (__typeof__(lval)) _res; \
2123 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2124 arg7,arg8,arg9,arg10,arg11) \
2126 volatile OrigFn _orig = (orig); \
2127 volatile unsigned long _argvec[12]; \
2128 volatile unsigned long _res; \
2129 _argvec[0] = (unsigned long)_orig.nraddr; \
2130 _argvec[1] = (unsigned long)(arg1); \
2131 _argvec[2] = (unsigned long)(arg2); \
2132 _argvec[3] = (unsigned long)(arg3); \
2133 _argvec[4] = (unsigned long)(arg4); \
2134 _argvec[5] = (unsigned long)(arg5); \
2135 _argvec[6] = (unsigned long)(arg6); \
2136 _argvec[7] = (unsigned long)(arg7); \
2137 _argvec[8] = (unsigned long)(arg8); \
2138 _argvec[9] = (unsigned long)(arg9); \
2139 _argvec[10] = (unsigned long)(arg10); \
2140 _argvec[11] = (unsigned long)(arg11); \
2142 VALGRIND_CFI_PROLOGUE \
2143 VALGRIND_ALIGN_STACK \
2144 "subq $136,%%rsp\n\t" \
2145 "pushq 88(%%rax)\n\t" \
2146 "pushq 80(%%rax)\n\t" \
2147 "pushq 72(%%rax)\n\t" \
2148 "pushq 64(%%rax)\n\t" \
2149 "pushq 56(%%rax)\n\t" \
2150 "movq 48(%%rax), %%r9\n\t" \
2151 "movq 40(%%rax), %%r8\n\t" \
2152 "movq 32(%%rax), %%rcx\n\t" \
2153 "movq 24(%%rax), %%rdx\n\t" \
2154 "movq 16(%%rax), %%rsi\n\t" \
2155 "movq 8(%%rax), %%rdi\n\t" \
2156 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2157 VALGRIND_CALL_NOREDIR_RAX \
2158 VALGRIND_RESTORE_STACK \
2159 VALGRIND_CFI_EPILOGUE \
2160 : /*out*/ "=a" (_res) \
2161 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2162 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2164 lval = (__typeof__(lval)) _res; \
2167 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2168 arg7,arg8,arg9,arg10,arg11,arg12) \
2170 volatile OrigFn _orig = (orig); \
2171 volatile unsigned long _argvec[13]; \
2172 volatile unsigned long _res; \
2173 _argvec[0] = (unsigned long)_orig.nraddr; \
2174 _argvec[1] = (unsigned long)(arg1); \
2175 _argvec[2] = (unsigned long)(arg2); \
2176 _argvec[3] = (unsigned long)(arg3); \
2177 _argvec[4] = (unsigned long)(arg4); \
2178 _argvec[5] = (unsigned long)(arg5); \
2179 _argvec[6] = (unsigned long)(arg6); \
2180 _argvec[7] = (unsigned long)(arg7); \
2181 _argvec[8] = (unsigned long)(arg8); \
2182 _argvec[9] = (unsigned long)(arg9); \
2183 _argvec[10] = (unsigned long)(arg10); \
2184 _argvec[11] = (unsigned long)(arg11); \
2185 _argvec[12] = (unsigned long)(arg12); \
2187 VALGRIND_CFI_PROLOGUE \
2188 VALGRIND_ALIGN_STACK \
2189 "subq $128,%%rsp\n\t" \
2190 "pushq 96(%%rax)\n\t" \
2191 "pushq 88(%%rax)\n\t" \
2192 "pushq 80(%%rax)\n\t" \
2193 "pushq 72(%%rax)\n\t" \
2194 "pushq 64(%%rax)\n\t" \
2195 "pushq 56(%%rax)\n\t" \
2196 "movq 48(%%rax), %%r9\n\t" \
2197 "movq 40(%%rax), %%r8\n\t" \
2198 "movq 32(%%rax), %%rcx\n\t" \
2199 "movq 24(%%rax), %%rdx\n\t" \
2200 "movq 16(%%rax), %%rsi\n\t" \
2201 "movq 8(%%rax), %%rdi\n\t" \
2202 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2203 VALGRIND_CALL_NOREDIR_RAX \
2204 VALGRIND_RESTORE_STACK \
2205 VALGRIND_CFI_EPILOGUE \
2206 : /*out*/ "=a" (_res) \
2207 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2208 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2210 lval = (__typeof__(lval)) _res; \
2213 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
2215 /* ------------------------ ppc32-linux ------------------------ */
2217 #if defined(PLAT_ppc32_linux)
/* This is useful for finding out about the on-stack stuff:

   extern int f9  ( int,int,int,int,int,int,int,int,int );
   extern int f10 ( int,int,int,int,int,int,int,int,int,int );
   extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
   extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );

   int g9  ( void ) { return f9 (11,22,33,44,55,66,77,88,99); }
   int g10 ( void ) { return f10(11,22,33,44,55,66,77,88,99,110); }
   int g11 ( void ) { return f11(11,22,33,44,55,66,77,88,99,110,121); }
   int g12 ( void ) { return f12(11,22,33,44,55,66,77,88,99,110,121,132); }
*/
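/* (Compile the fragment above for a ppc32 target, e.g. with 'gcc -S',
   and look at the generated assembly: the first eight integer
   arguments arrive in r3..r10 and the remainder are written into the
   caller's outgoing-argument area on the stack, which is what the
   9..12-argument macros below have to reproduce by hand.) */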
2240 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2242 /* These regs are trashed by the hidden call. */
2243 #define __CALLER_SAVED_REGS \
2244 "lr", "ctr", "xer", \
2245 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2246 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2249 /* Macros to save and align the stack before making a function
2250 call and restore it afterwards as gcc may not keep the stack
2251 pointer aligned if it doesn't realise calls are being made
2252 to other functions. */
#define VALGRIND_ALIGN_STACK               \
      "mr 28,1\n\t"                        \
      "rlwinm 1,1,0,0,27\n\t"
#define VALGRIND_RESTORE_STACK             \
      "mr 1,28\n\t"
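/* Illustrative note: 'rlwinm 1,1,0,0,27' keeps bits 0..27 of r1 and
   clears bits 28..31 (IBM bit numbering), i.e. it rounds the stack
   pointer down to a 16-byte boundary; the original value sits in r28
   until VALGRIND_RESTORE_STACK puts it back. */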
2260 /* These CALL_FN_ macros assume that on ppc32-linux,
2261 sizeof(unsigned long) == 4. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"      /* &_argvec[0] -> r11 */             \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"           /* result comes back in r3 */        \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2283 #define CALL_FN_W_W(lval, orig, arg1) \
2285 volatile OrigFn _orig = (orig); \
2286 volatile unsigned long _argvec[2]; \
2287 volatile unsigned long _res; \
2288 _argvec[0] = (unsigned long)_orig.nraddr; \
2289 _argvec[1] = (unsigned long)arg1; \
2291 VALGRIND_ALIGN_STACK \
2293 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2294 "lwz 11,0(11)\n\t" /* target->r11 */ \
2295 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2296 VALGRIND_RESTORE_STACK \
2298 : /*out*/ "=r" (_res) \
2299 : /*in*/ "r" (&_argvec[0]) \
2300 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2302 lval = (__typeof__(lval)) _res; \
2305 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2307 volatile OrigFn _orig = (orig); \
2308 volatile unsigned long _argvec[3]; \
2309 volatile unsigned long _res; \
2310 _argvec[0] = (unsigned long)_orig.nraddr; \
2311 _argvec[1] = (unsigned long)arg1; \
2312 _argvec[2] = (unsigned long)arg2; \
2314 VALGRIND_ALIGN_STACK \
2316 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2318 "lwz 11,0(11)\n\t" /* target->r11 */ \
2319 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2320 VALGRIND_RESTORE_STACK \
2322 : /*out*/ "=r" (_res) \
2323 : /*in*/ "r" (&_argvec[0]) \
2324 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2326 lval = (__typeof__(lval)) _res; \
2329 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2331 volatile OrigFn _orig = (orig); \
2332 volatile unsigned long _argvec[4]; \
2333 volatile unsigned long _res; \
2334 _argvec[0] = (unsigned long)_orig.nraddr; \
2335 _argvec[1] = (unsigned long)arg1; \
2336 _argvec[2] = (unsigned long)arg2; \
2337 _argvec[3] = (unsigned long)arg3; \
2339 VALGRIND_ALIGN_STACK \
2341 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2343 "lwz 5,12(11)\n\t" \
2344 "lwz 11,0(11)\n\t" /* target->r11 */ \
2345 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2346 VALGRIND_RESTORE_STACK \
2348 : /*out*/ "=r" (_res) \
2349 : /*in*/ "r" (&_argvec[0]) \
2350 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2352 lval = (__typeof__(lval)) _res; \
2355 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2357 volatile OrigFn _orig = (orig); \
2358 volatile unsigned long _argvec[5]; \
2359 volatile unsigned long _res; \
2360 _argvec[0] = (unsigned long)_orig.nraddr; \
2361 _argvec[1] = (unsigned long)arg1; \
2362 _argvec[2] = (unsigned long)arg2; \
2363 _argvec[3] = (unsigned long)arg3; \
2364 _argvec[4] = (unsigned long)arg4; \
2366 VALGRIND_ALIGN_STACK \
2368 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2370 "lwz 5,12(11)\n\t" \
2371 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2372 "lwz 11,0(11)\n\t" /* target->r11 */ \
2373 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2374 VALGRIND_RESTORE_STACK \
2376 : /*out*/ "=r" (_res) \
2377 : /*in*/ "r" (&_argvec[0]) \
2378 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2380 lval = (__typeof__(lval)) _res; \
2383 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2385 volatile OrigFn _orig = (orig); \
2386 volatile unsigned long _argvec[6]; \
2387 volatile unsigned long _res; \
2388 _argvec[0] = (unsigned long)_orig.nraddr; \
2389 _argvec[1] = (unsigned long)arg1; \
2390 _argvec[2] = (unsigned long)arg2; \
2391 _argvec[3] = (unsigned long)arg3; \
2392 _argvec[4] = (unsigned long)arg4; \
2393 _argvec[5] = (unsigned long)arg5; \
2395 VALGRIND_ALIGN_STACK \
2397 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2399 "lwz 5,12(11)\n\t" \
2400 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2401 "lwz 7,20(11)\n\t" \
2402 "lwz 11,0(11)\n\t" /* target->r11 */ \
2403 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2404 VALGRIND_RESTORE_STACK \
2406 : /*out*/ "=r" (_res) \
2407 : /*in*/ "r" (&_argvec[0]) \
2408 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2410 lval = (__typeof__(lval)) _res; \
2413 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2415 volatile OrigFn _orig = (orig); \
2416 volatile unsigned long _argvec[7]; \
2417 volatile unsigned long _res; \
2418 _argvec[0] = (unsigned long)_orig.nraddr; \
2419 _argvec[1] = (unsigned long)arg1; \
2420 _argvec[2] = (unsigned long)arg2; \
2421 _argvec[3] = (unsigned long)arg3; \
2422 _argvec[4] = (unsigned long)arg4; \
2423 _argvec[5] = (unsigned long)arg5; \
2424 _argvec[6] = (unsigned long)arg6; \
2426 VALGRIND_ALIGN_STACK \
2428 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2430 "lwz 5,12(11)\n\t" \
2431 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2432 "lwz 7,20(11)\n\t" \
2433 "lwz 8,24(11)\n\t" \
2434 "lwz 11,0(11)\n\t" /* target->r11 */ \
2435 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2436 VALGRIND_RESTORE_STACK \
2438 : /*out*/ "=r" (_res) \
2439 : /*in*/ "r" (&_argvec[0]) \
2440 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2442 lval = (__typeof__(lval)) _res; \
2445 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2448 volatile OrigFn _orig = (orig); \
2449 volatile unsigned long _argvec[8]; \
2450 volatile unsigned long _res; \
2451 _argvec[0] = (unsigned long)_orig.nraddr; \
2452 _argvec[1] = (unsigned long)arg1; \
2453 _argvec[2] = (unsigned long)arg2; \
2454 _argvec[3] = (unsigned long)arg3; \
2455 _argvec[4] = (unsigned long)arg4; \
2456 _argvec[5] = (unsigned long)arg5; \
2457 _argvec[6] = (unsigned long)arg6; \
2458 _argvec[7] = (unsigned long)arg7; \
2460 VALGRIND_ALIGN_STACK \
2462 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2464 "lwz 5,12(11)\n\t" \
2465 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2466 "lwz 7,20(11)\n\t" \
2467 "lwz 8,24(11)\n\t" \
2468 "lwz 9,28(11)\n\t" \
2469 "lwz 11,0(11)\n\t" /* target->r11 */ \
2470 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2471 VALGRIND_RESTORE_STACK \
2473 : /*out*/ "=r" (_res) \
2474 : /*in*/ "r" (&_argvec[0]) \
2475 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2477 lval = (__typeof__(lval)) _res; \
2480 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2483 volatile OrigFn _orig = (orig); \
2484 volatile unsigned long _argvec[9]; \
2485 volatile unsigned long _res; \
2486 _argvec[0] = (unsigned long)_orig.nraddr; \
2487 _argvec[1] = (unsigned long)arg1; \
2488 _argvec[2] = (unsigned long)arg2; \
2489 _argvec[3] = (unsigned long)arg3; \
2490 _argvec[4] = (unsigned long)arg4; \
2491 _argvec[5] = (unsigned long)arg5; \
2492 _argvec[6] = (unsigned long)arg6; \
2493 _argvec[7] = (unsigned long)arg7; \
2494 _argvec[8] = (unsigned long)arg8; \
2496 VALGRIND_ALIGN_STACK \
2498 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2500 "lwz 5,12(11)\n\t" \
2501 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2502 "lwz 7,20(11)\n\t" \
2503 "lwz 8,24(11)\n\t" \
2504 "lwz 9,28(11)\n\t" \
2505 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2506 "lwz 11,0(11)\n\t" /* target->r11 */ \
2507 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2508 VALGRIND_RESTORE_STACK \
2510 : /*out*/ "=r" (_res) \
2511 : /*in*/ "r" (&_argvec[0]) \
2512 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2514 lval = (__typeof__(lval)) _res; \
2517 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2520 volatile OrigFn _orig = (orig); \
2521 volatile unsigned long _argvec[10]; \
2522 volatile unsigned long _res; \
2523 _argvec[0] = (unsigned long)_orig.nraddr; \
2524 _argvec[1] = (unsigned long)arg1; \
2525 _argvec[2] = (unsigned long)arg2; \
2526 _argvec[3] = (unsigned long)arg3; \
2527 _argvec[4] = (unsigned long)arg4; \
2528 _argvec[5] = (unsigned long)arg5; \
2529 _argvec[6] = (unsigned long)arg6; \
2530 _argvec[7] = (unsigned long)arg7; \
2531 _argvec[8] = (unsigned long)arg8; \
2532 _argvec[9] = (unsigned long)arg9; \
2534 VALGRIND_ALIGN_STACK \
2536 "addi 1,1,-16\n\t" \
2538 "lwz 3,36(11)\n\t" \
2541 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2543 "lwz 5,12(11)\n\t" \
2544 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2545 "lwz 7,20(11)\n\t" \
2546 "lwz 8,24(11)\n\t" \
2547 "lwz 9,28(11)\n\t" \
2548 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2549 "lwz 11,0(11)\n\t" /* target->r11 */ \
2550 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2551 VALGRIND_RESTORE_STACK \
2553 : /*out*/ "=r" (_res) \
2554 : /*in*/ "r" (&_argvec[0]) \
2555 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2557 lval = (__typeof__(lval)) _res; \
2560 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2561 arg7,arg8,arg9,arg10) \
2563 volatile OrigFn _orig = (orig); \
2564 volatile unsigned long _argvec[11]; \
2565 volatile unsigned long _res; \
2566 _argvec[0] = (unsigned long)_orig.nraddr; \
2567 _argvec[1] = (unsigned long)arg1; \
2568 _argvec[2] = (unsigned long)arg2; \
2569 _argvec[3] = (unsigned long)arg3; \
2570 _argvec[4] = (unsigned long)arg4; \
2571 _argvec[5] = (unsigned long)arg5; \
2572 _argvec[6] = (unsigned long)arg6; \
2573 _argvec[7] = (unsigned long)arg7; \
2574 _argvec[8] = (unsigned long)arg8; \
2575 _argvec[9] = (unsigned long)arg9; \
2576 _argvec[10] = (unsigned long)arg10; \
2578 VALGRIND_ALIGN_STACK \
2580 "addi 1,1,-16\n\t" \
2582 "lwz 3,40(11)\n\t" \
2585 "lwz 3,36(11)\n\t" \
2588 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2590 "lwz 5,12(11)\n\t" \
2591 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2592 "lwz 7,20(11)\n\t" \
2593 "lwz 8,24(11)\n\t" \
2594 "lwz 9,28(11)\n\t" \
2595 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2596 "lwz 11,0(11)\n\t" /* target->r11 */ \
2597 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2598 VALGRIND_RESTORE_STACK \
2600 : /*out*/ "=r" (_res) \
2601 : /*in*/ "r" (&_argvec[0]) \
2602 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2604 lval = (__typeof__(lval)) _res; \
2607 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2608 arg7,arg8,arg9,arg10,arg11) \
2610 volatile OrigFn _orig = (orig); \
2611 volatile unsigned long _argvec[12]; \
2612 volatile unsigned long _res; \
2613 _argvec[0] = (unsigned long)_orig.nraddr; \
2614 _argvec[1] = (unsigned long)arg1; \
2615 _argvec[2] = (unsigned long)arg2; \
2616 _argvec[3] = (unsigned long)arg3; \
2617 _argvec[4] = (unsigned long)arg4; \
2618 _argvec[5] = (unsigned long)arg5; \
2619 _argvec[6] = (unsigned long)arg6; \
2620 _argvec[7] = (unsigned long)arg7; \
2621 _argvec[8] = (unsigned long)arg8; \
2622 _argvec[9] = (unsigned long)arg9; \
2623 _argvec[10] = (unsigned long)arg10; \
2624 _argvec[11] = (unsigned long)arg11; \
2626 VALGRIND_ALIGN_STACK \
2628 "addi 1,1,-32\n\t" \
2630 "lwz 3,44(11)\n\t" \
2633 "lwz 3,40(11)\n\t" \
2636 "lwz 3,36(11)\n\t" \
2639 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2641 "lwz 5,12(11)\n\t" \
2642 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2643 "lwz 7,20(11)\n\t" \
2644 "lwz 8,24(11)\n\t" \
2645 "lwz 9,28(11)\n\t" \
2646 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2647 "lwz 11,0(11)\n\t" /* target->r11 */ \
2648 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2649 VALGRIND_RESTORE_STACK \
2651 : /*out*/ "=r" (_res) \
2652 : /*in*/ "r" (&_argvec[0]) \
2653 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2655 lval = (__typeof__(lval)) _res; \
2658 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2659 arg7,arg8,arg9,arg10,arg11,arg12) \
2661 volatile OrigFn _orig = (orig); \
2662 volatile unsigned long _argvec[13]; \
2663 volatile unsigned long _res; \
2664 _argvec[0] = (unsigned long)_orig.nraddr; \
2665 _argvec[1] = (unsigned long)arg1; \
2666 _argvec[2] = (unsigned long)arg2; \
2667 _argvec[3] = (unsigned long)arg3; \
2668 _argvec[4] = (unsigned long)arg4; \
2669 _argvec[5] = (unsigned long)arg5; \
2670 _argvec[6] = (unsigned long)arg6; \
2671 _argvec[7] = (unsigned long)arg7; \
2672 _argvec[8] = (unsigned long)arg8; \
2673 _argvec[9] = (unsigned long)arg9; \
2674 _argvec[10] = (unsigned long)arg10; \
2675 _argvec[11] = (unsigned long)arg11; \
2676 _argvec[12] = (unsigned long)arg12; \
2678 VALGRIND_ALIGN_STACK \
2680 "addi 1,1,-32\n\t" \
2682 "lwz 3,48(11)\n\t" \
2685 "lwz 3,44(11)\n\t" \
2688 "lwz 3,40(11)\n\t" \
2691 "lwz 3,36(11)\n\t" \
2694 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2696 "lwz 5,12(11)\n\t" \
2697 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2698 "lwz 7,20(11)\n\t" \
2699 "lwz 8,24(11)\n\t" \
2700 "lwz 9,28(11)\n\t" \
2701 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2702 "lwz 11,0(11)\n\t" /* target->r11 */ \
2703 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2704 VALGRIND_RESTORE_STACK \
2706 : /*out*/ "=r" (_res) \
2707 : /*in*/ "r" (&_argvec[0]) \
2708 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2710 lval = (__typeof__(lval)) _res; \
2713 #endif /* PLAT_ppc32_linux */
2715 /* ------------------------ ppc64-linux ------------------------ */
2717 #if defined(PLAT_ppc64be_linux)
2719 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2721 /* These regs are trashed by the hidden call. */
2722 #define __CALLER_SAVED_REGS \
2723 "lr", "ctr", "xer", \
2724 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2725 "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2728 /* Macros to save and align the stack before making a function
2729 call and restore it afterwards as gcc may not keep the stack
2730 pointer aligned if it doesn't realise calls are being made
2731 to other functions. */
#define VALGRIND_ALIGN_STACK               \
      "mr 28,1\n\t"                        \
      "rldicr 1,1,0,59\n\t"
#define VALGRIND_RESTORE_STACK             \
      "mr 1,28\n\t"
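/* Illustrative note: 'rldicr 1,1,0,59' keeps bits 0..59 of r1 and
   clears bits 60..63 (IBM bit numbering), i.e. it rounds the stack
   pointer down to a 16-byte boundary; r28 holds the saved value for
   VALGRIND_RESTORE_STACK. */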
/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
   long) == 8. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"       /* &_argvec[2] -> r11 */            \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"        /* result comes back in r3 */       \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2768 #define CALL_FN_W_W(lval, orig, arg1) \
2770 volatile OrigFn _orig = (orig); \
2771 volatile unsigned long _argvec[3+1]; \
2772 volatile unsigned long _res; \
2773 /* _argvec[0] holds current r2 across the call */ \
2774 _argvec[1] = (unsigned long)_orig.r2; \
2775 _argvec[2] = (unsigned long)_orig.nraddr; \
2776 _argvec[2+1] = (unsigned long)arg1; \
2778 VALGRIND_ALIGN_STACK \
2780 "std 2,-16(11)\n\t" /* save tocptr */ \
2781 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2782 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2783 "ld 11, 0(11)\n\t" /* target->r11 */ \
2784 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2787 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2788 VALGRIND_RESTORE_STACK \
2789 : /*out*/ "=r" (_res) \
2790 : /*in*/ "r" (&_argvec[2]) \
2791 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2793 lval = (__typeof__(lval)) _res; \
2796 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2798 volatile OrigFn _orig = (orig); \
2799 volatile unsigned long _argvec[3+2]; \
2800 volatile unsigned long _res; \
2801 /* _argvec[0] holds current r2 across the call */ \
2802 _argvec[1] = (unsigned long)_orig.r2; \
2803 _argvec[2] = (unsigned long)_orig.nraddr; \
2804 _argvec[2+1] = (unsigned long)arg1; \
2805 _argvec[2+2] = (unsigned long)arg2; \
2807 VALGRIND_ALIGN_STACK \
2809 "std 2,-16(11)\n\t" /* save tocptr */ \
2810 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2811 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2812 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2813 "ld 11, 0(11)\n\t" /* target->r11 */ \
2814 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2817 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2818 VALGRIND_RESTORE_STACK \
2819 : /*out*/ "=r" (_res) \
2820 : /*in*/ "r" (&_argvec[2]) \
2821 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2823 lval = (__typeof__(lval)) _res; \
2826 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2828 volatile OrigFn _orig = (orig); \
2829 volatile unsigned long _argvec[3+3]; \
2830 volatile unsigned long _res; \
2831 /* _argvec[0] holds current r2 across the call */ \
2832 _argvec[1] = (unsigned long)_orig.r2; \
2833 _argvec[2] = (unsigned long)_orig.nraddr; \
2834 _argvec[2+1] = (unsigned long)arg1; \
2835 _argvec[2+2] = (unsigned long)arg2; \
2836 _argvec[2+3] = (unsigned long)arg3; \
2838 VALGRIND_ALIGN_STACK \
2840 "std 2,-16(11)\n\t" /* save tocptr */ \
2841 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2842 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2843 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2844 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2845 "ld 11, 0(11)\n\t" /* target->r11 */ \
2846 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2849 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2850 VALGRIND_RESTORE_STACK \
2851 : /*out*/ "=r" (_res) \
2852 : /*in*/ "r" (&_argvec[2]) \
2853 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2855 lval = (__typeof__(lval)) _res; \
2858 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2860 volatile OrigFn _orig = (orig); \
2861 volatile unsigned long _argvec[3+4]; \
2862 volatile unsigned long _res; \
2863 /* _argvec[0] holds current r2 across the call */ \
2864 _argvec[1] = (unsigned long)_orig.r2; \
2865 _argvec[2] = (unsigned long)_orig.nraddr; \
2866 _argvec[2+1] = (unsigned long)arg1; \
2867 _argvec[2+2] = (unsigned long)arg2; \
2868 _argvec[2+3] = (unsigned long)arg3; \
2869 _argvec[2+4] = (unsigned long)arg4; \
2871 VALGRIND_ALIGN_STACK \
2873 "std 2,-16(11)\n\t" /* save tocptr */ \
2874 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2875 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2876 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2877 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2878 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2879 "ld 11, 0(11)\n\t" /* target->r11 */ \
2880 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2883 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2884 VALGRIND_RESTORE_STACK \
2885 : /*out*/ "=r" (_res) \
2886 : /*in*/ "r" (&_argvec[2]) \
2887 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2889 lval = (__typeof__(lval)) _res; \
2892 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2894 volatile OrigFn _orig = (orig); \
2895 volatile unsigned long _argvec[3+5]; \
2896 volatile unsigned long _res; \
2897 /* _argvec[0] holds current r2 across the call */ \
2898 _argvec[1] = (unsigned long)_orig.r2; \
2899 _argvec[2] = (unsigned long)_orig.nraddr; \
2900 _argvec[2+1] = (unsigned long)arg1; \
2901 _argvec[2+2] = (unsigned long)arg2; \
2902 _argvec[2+3] = (unsigned long)arg3; \
2903 _argvec[2+4] = (unsigned long)arg4; \
2904 _argvec[2+5] = (unsigned long)arg5; \
2906 VALGRIND_ALIGN_STACK \
2908 "std 2,-16(11)\n\t" /* save tocptr */ \
2909 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2910 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2911 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2912 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2913 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2914 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2915 "ld 11, 0(11)\n\t" /* target->r11 */ \
2916 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2919 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2920 VALGRIND_RESTORE_STACK \
2921 : /*out*/ "=r" (_res) \
2922 : /*in*/ "r" (&_argvec[2]) \
2923 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2925 lval = (__typeof__(lval)) _res; \
2928 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2930 volatile OrigFn _orig = (orig); \
2931 volatile unsigned long _argvec[3+6]; \
2932 volatile unsigned long _res; \
2933 /* _argvec[0] holds current r2 across the call */ \
2934 _argvec[1] = (unsigned long)_orig.r2; \
2935 _argvec[2] = (unsigned long)_orig.nraddr; \
2936 _argvec[2+1] = (unsigned long)arg1; \
2937 _argvec[2+2] = (unsigned long)arg2; \
2938 _argvec[2+3] = (unsigned long)arg3; \
2939 _argvec[2+4] = (unsigned long)arg4; \
2940 _argvec[2+5] = (unsigned long)arg5; \
2941 _argvec[2+6] = (unsigned long)arg6; \
2943 VALGRIND_ALIGN_STACK \
2945 "std 2,-16(11)\n\t" /* save tocptr */ \
2946 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2947 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2948 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2949 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2950 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2951 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2952 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2953 "ld 11, 0(11)\n\t" /* target->r11 */ \
2954 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2957 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2958 VALGRIND_RESTORE_STACK \
2959 : /*out*/ "=r" (_res) \
2960 : /*in*/ "r" (&_argvec[2]) \
2961 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2963 lval = (__typeof__(lval)) _res; \
2966 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2969 volatile OrigFn _orig = (orig); \
2970 volatile unsigned long _argvec[3+7]; \
2971 volatile unsigned long _res; \
2972 /* _argvec[0] holds current r2 across the call */ \
2973 _argvec[1] = (unsigned long)_orig.r2; \
2974 _argvec[2] = (unsigned long)_orig.nraddr; \
2975 _argvec[2+1] = (unsigned long)arg1; \
2976 _argvec[2+2] = (unsigned long)arg2; \
2977 _argvec[2+3] = (unsigned long)arg3; \
2978 _argvec[2+4] = (unsigned long)arg4; \
2979 _argvec[2+5] = (unsigned long)arg5; \
2980 _argvec[2+6] = (unsigned long)arg6; \
2981 _argvec[2+7] = (unsigned long)arg7; \
2983 VALGRIND_ALIGN_STACK \
2985 "std 2,-16(11)\n\t" /* save tocptr */ \
2986 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2987 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2988 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2989 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2990 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2991 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2992 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2993 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2994 "ld 11, 0(11)\n\t" /* target->r11 */ \
2995 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2998 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2999 VALGRIND_RESTORE_STACK \
3000 : /*out*/ "=r" (_res) \
3001 : /*in*/ "r" (&_argvec[2]) \
3002 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3004 lval = (__typeof__(lval)) _res; \
3007 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3010 volatile OrigFn _orig = (orig); \
3011 volatile unsigned long _argvec[3+8]; \
3012 volatile unsigned long _res; \
3013 /* _argvec[0] holds current r2 across the call */ \
3014 _argvec[1] = (unsigned long)_orig.r2; \
3015 _argvec[2] = (unsigned long)_orig.nraddr; \
3016 _argvec[2+1] = (unsigned long)arg1; \
3017 _argvec[2+2] = (unsigned long)arg2; \
3018 _argvec[2+3] = (unsigned long)arg3; \
3019 _argvec[2+4] = (unsigned long)arg4; \
3020 _argvec[2+5] = (unsigned long)arg5; \
3021 _argvec[2+6] = (unsigned long)arg6; \
3022 _argvec[2+7] = (unsigned long)arg7; \
3023 _argvec[2+8] = (unsigned long)arg8; \
3025 VALGRIND_ALIGN_STACK \
3027 "std 2,-16(11)\n\t" /* save tocptr */ \
3028 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3029 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3030 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3031 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3032 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3033 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3034 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3035 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3036 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3037 "ld 11, 0(11)\n\t" /* target->r11 */ \
3038 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3041 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3042 VALGRIND_RESTORE_STACK \
3043 : /*out*/ "=r" (_res) \
3044 : /*in*/ "r" (&_argvec[2]) \
3045 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3047 lval = (__typeof__(lval)) _res; \
3050 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3053 volatile OrigFn _orig = (orig); \
3054 volatile unsigned long _argvec[3+9]; \
3055 volatile unsigned long _res; \
3056 /* _argvec[0] holds current r2 across the call */ \
3057 _argvec[1] = (unsigned long)_orig.r2; \
3058 _argvec[2] = (unsigned long)_orig.nraddr; \
3059 _argvec[2+1] = (unsigned long)arg1; \
3060 _argvec[2+2] = (unsigned long)arg2; \
3061 _argvec[2+3] = (unsigned long)arg3; \
3062 _argvec[2+4] = (unsigned long)arg4; \
3063 _argvec[2+5] = (unsigned long)arg5; \
3064 _argvec[2+6] = (unsigned long)arg6; \
3065 _argvec[2+7] = (unsigned long)arg7; \
3066 _argvec[2+8] = (unsigned long)arg8; \
3067 _argvec[2+9] = (unsigned long)arg9; \
3069 VALGRIND_ALIGN_STACK \
3071 "std 2,-16(11)\n\t" /* save tocptr */ \
3072 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3073 "addi 1,1,-128\n\t" /* expand stack frame */ \
3076 "std 3,112(1)\n\t" \
3078 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3079 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3080 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3081 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3082 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3083 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3084 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3085 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3086 "ld 11, 0(11)\n\t" /* target->r11 */ \
3087 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3090 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3091 VALGRIND_RESTORE_STACK \
3092 : /*out*/ "=r" (_res) \
3093 : /*in*/ "r" (&_argvec[2]) \
3094 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3096 lval = (__typeof__(lval)) _res; \
3099 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3100 arg7,arg8,arg9,arg10) \
3102 volatile OrigFn _orig = (orig); \
3103 volatile unsigned long _argvec[3+10]; \
3104 volatile unsigned long _res; \
3105 /* _argvec[0] holds current r2 across the call */ \
3106 _argvec[1] = (unsigned long)_orig.r2; \
3107 _argvec[2] = (unsigned long)_orig.nraddr; \
3108 _argvec[2+1] = (unsigned long)arg1; \
3109 _argvec[2+2] = (unsigned long)arg2; \
3110 _argvec[2+3] = (unsigned long)arg3; \
3111 _argvec[2+4] = (unsigned long)arg4; \
3112 _argvec[2+5] = (unsigned long)arg5; \
3113 _argvec[2+6] = (unsigned long)arg6; \
3114 _argvec[2+7] = (unsigned long)arg7; \
3115 _argvec[2+8] = (unsigned long)arg8; \
3116 _argvec[2+9] = (unsigned long)arg9; \
3117 _argvec[2+10] = (unsigned long)arg10; \
3119 VALGRIND_ALIGN_STACK \
3121 "std 2,-16(11)\n\t" /* save tocptr */ \
3122 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3123 "addi 1,1,-128\n\t" /* expand stack frame */ \
3126 "std 3,120(1)\n\t" \
3129 "std 3,112(1)\n\t" \
3131 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3132 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3133 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3134 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3135 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3136 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3137 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3138 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3139 "ld 11, 0(11)\n\t" /* target->r11 */ \
3140 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3143 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3144 VALGRIND_RESTORE_STACK \
3145 : /*out*/ "=r" (_res) \
3146 : /*in*/ "r" (&_argvec[2]) \
3147 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3149 lval = (__typeof__(lval)) _res; \
3152 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3153 arg7,arg8,arg9,arg10,arg11) \
3155 volatile OrigFn _orig = (orig); \
3156 volatile unsigned long _argvec[3+11]; \
3157 volatile unsigned long _res; \
3158 /* _argvec[0] holds current r2 across the call */ \
3159 _argvec[1] = (unsigned long)_orig.r2; \
3160 _argvec[2] = (unsigned long)_orig.nraddr; \
3161 _argvec[2+1] = (unsigned long)arg1; \
3162 _argvec[2+2] = (unsigned long)arg2; \
3163 _argvec[2+3] = (unsigned long)arg3; \
3164 _argvec[2+4] = (unsigned long)arg4; \
3165 _argvec[2+5] = (unsigned long)arg5; \
3166 _argvec[2+6] = (unsigned long)arg6; \
3167 _argvec[2+7] = (unsigned long)arg7; \
3168 _argvec[2+8] = (unsigned long)arg8; \
3169 _argvec[2+9] = (unsigned long)arg9; \
3170 _argvec[2+10] = (unsigned long)arg10; \
3171 _argvec[2+11] = (unsigned long)arg11; \
3173 VALGRIND_ALIGN_STACK \
3175 "std 2,-16(11)\n\t" /* save tocptr */ \
3176 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3177 "addi 1,1,-144\n\t" /* expand stack frame */ \
3180 "std 3,128(1)\n\t" \
3183 "std 3,120(1)\n\t" \
3186 "std 3,112(1)\n\t" \
3188 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3189 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3190 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3191 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3192 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3193 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3194 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3195 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3196 "ld 11, 0(11)\n\t" /* target->r11 */ \
3197 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3200 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3201 VALGRIND_RESTORE_STACK \
3202 : /*out*/ "=r" (_res) \
3203 : /*in*/ "r" (&_argvec[2]) \
3204 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3206 lval = (__typeof__(lval)) _res; \
3209 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3210 arg7,arg8,arg9,arg10,arg11,arg12) \
3212 volatile OrigFn _orig = (orig); \
3213 volatile unsigned long _argvec[3+12]; \
3214 volatile unsigned long _res; \
3215 /* _argvec[0] holds current r2 across the call */ \
3216 _argvec[1] = (unsigned long)_orig.r2; \
3217 _argvec[2] = (unsigned long)_orig.nraddr; \
3218 _argvec[2+1] = (unsigned long)arg1; \
3219 _argvec[2+2] = (unsigned long)arg2; \
3220 _argvec[2+3] = (unsigned long)arg3; \
3221 _argvec[2+4] = (unsigned long)arg4; \
3222 _argvec[2+5] = (unsigned long)arg5; \
3223 _argvec[2+6] = (unsigned long)arg6; \
3224 _argvec[2+7] = (unsigned long)arg7; \
3225 _argvec[2+8] = (unsigned long)arg8; \
3226 _argvec[2+9] = (unsigned long)arg9; \
3227 _argvec[2+10] = (unsigned long)arg10; \
3228 _argvec[2+11] = (unsigned long)arg11; \
3229 _argvec[2+12] = (unsigned long)arg12; \
3231 VALGRIND_ALIGN_STACK \
3233 "std 2,-16(11)\n\t" /* save tocptr */ \
3234 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3235 "addi 1,1,-144\n\t" /* expand stack frame */ \
3238 "std 3,136(1)\n\t" \
3241 "std 3,128(1)\n\t" \
3244 "std 3,120(1)\n\t" \
3247 "std 3,112(1)\n\t" \
3249 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3250 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3251 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3252 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3253 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3254 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3255 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3256 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3257 "ld 11, 0(11)\n\t" /* target->r11 */ \
3258 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3261 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3262 VALGRIND_RESTORE_STACK \
3263 : /*out*/ "=r" (_res) \
3264 : /*in*/ "r" (&_argvec[2]) \
3265 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3267 lval = (__typeof__(lval)) _res; \
3270 #endif /* PLAT_ppc64be_linux */
3272 /* ------------------------- ppc64le-linux ----------------------- */
3273 #if defined(PLAT_ppc64le_linux)
3275 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3277 /* These regs are trashed by the hidden call. */
3278 #define __CALLER_SAVED_REGS \
3279 "lr", "ctr", "xer", \
3280 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
3281 "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
3284 /* Macros to save and align the stack before making a function
3285 call and restore it afterwards as gcc may not keep the stack
3286 pointer aligned if it doesn't realise calls are being made
3287 to other functions. */
#define VALGRIND_ALIGN_STACK               \
      "mr 28,1\n\t"                        \
      "rldicr 1,1,0,59\n\t"
#define VALGRIND_RESTORE_STACK             \
      "mr 1,28\n\t"
/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
   long) == 8. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"       /* &_argvec[2] -> r12 */            \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"        /* result comes back in r3 */       \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3324 #define CALL_FN_W_W(lval, orig, arg1) \
3326 volatile OrigFn _orig = (orig); \
3327 volatile unsigned long _argvec[3+1]; \
3328 volatile unsigned long _res; \
3329 /* _argvec[0] holds current r2 across the call */ \
3330 _argvec[1] = (unsigned long)_orig.r2; \
3331 _argvec[2] = (unsigned long)_orig.nraddr; \
3332 _argvec[2+1] = (unsigned long)arg1; \
3334 VALGRIND_ALIGN_STACK \
3336 "std 2,-16(12)\n\t" /* save tocptr */ \
3337 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3338 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3339 "ld 12, 0(12)\n\t" /* target->r12 */ \
3340 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3343 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3344 VALGRIND_RESTORE_STACK \
3345 : /*out*/ "=r" (_res) \
3346 : /*in*/ "r" (&_argvec[2]) \
3347 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3349 lval = (__typeof__(lval)) _res; \
3352 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3354 volatile OrigFn _orig = (orig); \
3355 volatile unsigned long _argvec[3+2]; \
3356 volatile unsigned long _res; \
3357 /* _argvec[0] holds current r2 across the call */ \
3358 _argvec[1] = (unsigned long)_orig.r2; \
3359 _argvec[2] = (unsigned long)_orig.nraddr; \
3360 _argvec[2+1] = (unsigned long)arg1; \
3361 _argvec[2+2] = (unsigned long)arg2; \
3363 VALGRIND_ALIGN_STACK \
3365 "std 2,-16(12)\n\t" /* save tocptr */ \
3366 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3367 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3368 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3369 "ld 12, 0(12)\n\t" /* target->r12 */ \
3370 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3373 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3374 VALGRIND_RESTORE_STACK \
3375 : /*out*/ "=r" (_res) \
3376 : /*in*/ "r" (&_argvec[2]) \
3377 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3379 lval = (__typeof__(lval)) _res; \
3382 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3384 volatile OrigFn _orig = (orig); \
3385 volatile unsigned long _argvec[3+3]; \
3386 volatile unsigned long _res; \
3387 /* _argvec[0] holds current r2 across the call */ \
3388 _argvec[1] = (unsigned long)_orig.r2; \
3389 _argvec[2] = (unsigned long)_orig.nraddr; \
3390 _argvec[2+1] = (unsigned long)arg1; \
3391 _argvec[2+2] = (unsigned long)arg2; \
3392 _argvec[2+3] = (unsigned long)arg3; \
3394 VALGRIND_ALIGN_STACK \
3396 "std 2,-16(12)\n\t" /* save tocptr */ \
3397 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3398 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3399 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3400 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3401 "ld 12, 0(12)\n\t" /* target->r12 */ \
3402 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3405 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3406 VALGRIND_RESTORE_STACK \
3407 : /*out*/ "=r" (_res) \
3408 : /*in*/ "r" (&_argvec[2]) \
3409 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3411 lval = (__typeof__(lval)) _res; \
3414 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3416 volatile OrigFn _orig = (orig); \
3417 volatile unsigned long _argvec[3+4]; \
3418 volatile unsigned long _res; \
3419 /* _argvec[0] holds current r2 across the call */ \
3420 _argvec[1] = (unsigned long)_orig.r2; \
3421 _argvec[2] = (unsigned long)_orig.nraddr; \
3422 _argvec[2+1] = (unsigned long)arg1; \
3423 _argvec[2+2] = (unsigned long)arg2; \
3424 _argvec[2+3] = (unsigned long)arg3; \
3425 _argvec[2+4] = (unsigned long)arg4; \
3427 VALGRIND_ALIGN_STACK \
3429 "std 2,-16(12)\n\t" /* save tocptr */ \
3430 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3431 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3432 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3433 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3434 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3435 "ld 12, 0(12)\n\t" /* target->r12 */ \
3436 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3439 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3440 VALGRIND_RESTORE_STACK \
3441 : /*out*/ "=r" (_res) \
3442 : /*in*/ "r" (&_argvec[2]) \
3443 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3445 lval = (__typeof__(lval)) _res; \
3448 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3450 volatile OrigFn _orig = (orig); \
3451 volatile unsigned long _argvec[3+5]; \
3452 volatile unsigned long _res; \
3453 /* _argvec[0] holds current r2 across the call */ \
3454 _argvec[1] = (unsigned long)_orig.r2; \
3455 _argvec[2] = (unsigned long)_orig.nraddr; \
3456 _argvec[2+1] = (unsigned long)arg1; \
3457 _argvec[2+2] = (unsigned long)arg2; \
3458 _argvec[2+3] = (unsigned long)arg3; \
3459 _argvec[2+4] = (unsigned long)arg4; \
3460 _argvec[2+5] = (unsigned long)arg5; \
3462 VALGRIND_ALIGN_STACK \
3464 "std 2,-16(12)\n\t" /* save tocptr */ \
3465 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3466 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3467 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3468 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3469 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3470 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3471 "ld 12, 0(12)\n\t" /* target->r12 */ \
3472 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3475 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3476 VALGRIND_RESTORE_STACK \
3477 : /*out*/ "=r" (_res) \
3478 : /*in*/ "r" (&_argvec[2]) \
3479 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3481 lval = (__typeof__(lval)) _res; \
3484 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3486 volatile OrigFn _orig = (orig); \
3487 volatile unsigned long _argvec[3+6]; \
3488 volatile unsigned long _res; \
3489 /* _argvec[0] holds current r2 across the call */ \
3490 _argvec[1] = (unsigned long)_orig.r2; \
3491 _argvec[2] = (unsigned long)_orig.nraddr; \
3492 _argvec[2+1] = (unsigned long)arg1; \
3493 _argvec[2+2] = (unsigned long)arg2; \
3494 _argvec[2+3] = (unsigned long)arg3; \
3495 _argvec[2+4] = (unsigned long)arg4; \
3496 _argvec[2+5] = (unsigned long)arg5; \
3497 _argvec[2+6] = (unsigned long)arg6; \
3499 VALGRIND_ALIGN_STACK \
3501 "std 2,-16(12)\n\t" /* save tocptr */ \
3502 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3503 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3504 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3505 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3506 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3507 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3508 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3509 "ld 12, 0(12)\n\t" /* target->r12 */ \
3510 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3513 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3514 VALGRIND_RESTORE_STACK \
3515 : /*out*/ "=r" (_res) \
3516 : /*in*/ "r" (&_argvec[2]) \
3517 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3519 lval = (__typeof__(lval)) _res; \
3522 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3525 volatile OrigFn _orig = (orig); \
3526 volatile unsigned long _argvec[3+7]; \
3527 volatile unsigned long _res; \
3528 /* _argvec[0] holds current r2 across the call */ \
3529 _argvec[1] = (unsigned long)_orig.r2; \
3530 _argvec[2] = (unsigned long)_orig.nraddr; \
3531 _argvec[2+1] = (unsigned long)arg1; \
3532 _argvec[2+2] = (unsigned long)arg2; \
3533 _argvec[2+3] = (unsigned long)arg3; \
3534 _argvec[2+4] = (unsigned long)arg4; \
3535 _argvec[2+5] = (unsigned long)arg5; \
3536 _argvec[2+6] = (unsigned long)arg6; \
3537 _argvec[2+7] = (unsigned long)arg7; \
3539 VALGRIND_ALIGN_STACK \
3541 "std 2,-16(12)\n\t" /* save tocptr */ \
3542 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3543 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3544 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3545 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3546 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3547 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3548 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3549 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3550 "ld 12, 0(12)\n\t" /* target->r12 */ \
3551 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3554 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3555 VALGRIND_RESTORE_STACK \
3556 : /*out*/ "=r" (_res) \
3557 : /*in*/ "r" (&_argvec[2]) \
3558 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3560 lval = (__typeof__(lval)) _res; \
3563 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3566 volatile OrigFn _orig = (orig); \
3567 volatile unsigned long _argvec[3+8]; \
3568 volatile unsigned long _res; \
3569 /* _argvec[0] holds current r2 across the call */ \
3570 _argvec[1] = (unsigned long)_orig.r2; \
3571 _argvec[2] = (unsigned long)_orig.nraddr; \
3572 _argvec[2+1] = (unsigned long)arg1; \
3573 _argvec[2+2] = (unsigned long)arg2; \
3574 _argvec[2+3] = (unsigned long)arg3; \
3575 _argvec[2+4] = (unsigned long)arg4; \
3576 _argvec[2+5] = (unsigned long)arg5; \
3577 _argvec[2+6] = (unsigned long)arg6; \
3578 _argvec[2+7] = (unsigned long)arg7; \
3579 _argvec[2+8] = (unsigned long)arg8; \
3581 VALGRIND_ALIGN_STACK \
3583 "std 2,-16(12)\n\t" /* save tocptr */ \
3584 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3585 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3586 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3587 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3588 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3589 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3590 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3591 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3592 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3593 "ld 12, 0(12)\n\t" /* target->r12 */ \
3594 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3597 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3598 VALGRIND_RESTORE_STACK \
3599 : /*out*/ "=r" (_res) \
3600 : /*in*/ "r" (&_argvec[2]) \
3601 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3603 lval = (__typeof__(lval)) _res; \
3606 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3609 volatile OrigFn _orig = (orig); \
3610 volatile unsigned long _argvec[3+9]; \
3611 volatile unsigned long _res; \
3612 /* _argvec[0] holds current r2 across the call */ \
3613 _argvec[1] = (unsigned long)_orig.r2; \
3614 _argvec[2] = (unsigned long)_orig.nraddr; \
3615 _argvec[2+1] = (unsigned long)arg1; \
3616 _argvec[2+2] = (unsigned long)arg2; \
3617 _argvec[2+3] = (unsigned long)arg3; \
3618 _argvec[2+4] = (unsigned long)arg4; \
3619 _argvec[2+5] = (unsigned long)arg5; \
3620 _argvec[2+6] = (unsigned long)arg6; \
3621 _argvec[2+7] = (unsigned long)arg7; \
3622 _argvec[2+8] = (unsigned long)arg8; \
3623 _argvec[2+9] = (unsigned long)arg9; \
3625 VALGRIND_ALIGN_STACK \
3627 "std 2,-16(12)\n\t" /* save tocptr */ \
3628 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3629 "addi 1,1,-128\n\t" /* expand stack frame */ \
3634 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3635 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3636 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3637 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3638 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3639 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3640 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3641 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3642 "ld 12, 0(12)\n\t" /* target->r12 */ \
3643 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3646 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3647 VALGRIND_RESTORE_STACK \
3648 : /*out*/ "=r" (_res) \
3649 : /*in*/ "r" (&_argvec[2]) \
3650 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3652 lval = (__typeof__(lval)) _res; \
3655 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3656 arg7,arg8,arg9,arg10) \
3658 volatile OrigFn _orig = (orig); \
3659 volatile unsigned long _argvec[3+10]; \
3660 volatile unsigned long _res; \
3661 /* _argvec[0] holds current r2 across the call */ \
3662 _argvec[1] = (unsigned long)_orig.r2; \
3663 _argvec[2] = (unsigned long)_orig.nraddr; \
3664 _argvec[2+1] = (unsigned long)arg1; \
3665 _argvec[2+2] = (unsigned long)arg2; \
3666 _argvec[2+3] = (unsigned long)arg3; \
3667 _argvec[2+4] = (unsigned long)arg4; \
3668 _argvec[2+5] = (unsigned long)arg5; \
3669 _argvec[2+6] = (unsigned long)arg6; \
3670 _argvec[2+7] = (unsigned long)arg7; \
3671 _argvec[2+8] = (unsigned long)arg8; \
3672 _argvec[2+9] = (unsigned long)arg9; \
3673 _argvec[2+10] = (unsigned long)arg10; \
3675 VALGRIND_ALIGN_STACK \
3677 "std 2,-16(12)\n\t" /* save tocptr */ \
3678 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3679 "addi 1,1,-128\n\t" /* expand stack frame */ \
3682 "std 3,104(1)\n\t" \
3687 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3688 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3689 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3690 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3691 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3692 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3693 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3694 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3695 "ld 12, 0(12)\n\t" /* target->r12 */ \
3696 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3699 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3700 VALGRIND_RESTORE_STACK \
3701 : /*out*/ "=r" (_res) \
3702 : /*in*/ "r" (&_argvec[2]) \
3703 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3705 lval = (__typeof__(lval)) _res; \
3708 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3709 arg7,arg8,arg9,arg10,arg11) \
3711 volatile OrigFn _orig = (orig); \
3712 volatile unsigned long _argvec[3+11]; \
3713 volatile unsigned long _res; \
3714 /* _argvec[0] holds current r2 across the call */ \
3715 _argvec[1] = (unsigned long)_orig.r2; \
3716 _argvec[2] = (unsigned long)_orig.nraddr; \
3717 _argvec[2+1] = (unsigned long)arg1; \
3718 _argvec[2+2] = (unsigned long)arg2; \
3719 _argvec[2+3] = (unsigned long)arg3; \
3720 _argvec[2+4] = (unsigned long)arg4; \
3721 _argvec[2+5] = (unsigned long)arg5; \
3722 _argvec[2+6] = (unsigned long)arg6; \
3723 _argvec[2+7] = (unsigned long)arg7; \
3724 _argvec[2+8] = (unsigned long)arg8; \
3725 _argvec[2+9] = (unsigned long)arg9; \
3726 _argvec[2+10] = (unsigned long)arg10; \
3727 _argvec[2+11] = (unsigned long)arg11; \
3729 VALGRIND_ALIGN_STACK \
3731 "std 2,-16(12)\n\t" /* save tocptr */ \
3732 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3733 "addi 1,1,-144\n\t" /* expand stack frame */ \
3736 "std 3,112(1)\n\t" \
3739 "std 3,104(1)\n\t" \
3744 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3745 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3746 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3747 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3748 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3749 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3750 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3751 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3752 "ld 12, 0(12)\n\t" /* target->r12 */ \
3753 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3756 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3757 VALGRIND_RESTORE_STACK \
3758 : /*out*/ "=r" (_res) \
3759 : /*in*/ "r" (&_argvec[2]) \
3760 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3762 lval = (__typeof__(lval)) _res; \
3765 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3766 arg7,arg8,arg9,arg10,arg11,arg12) \
3768 volatile OrigFn _orig = (orig); \
3769 volatile unsigned long _argvec[3+12]; \
3770 volatile unsigned long _res; \
3771 /* _argvec[0] holds current r2 across the call */ \
3772 _argvec[1] = (unsigned long)_orig.r2; \
3773 _argvec[2] = (unsigned long)_orig.nraddr; \
3774 _argvec[2+1] = (unsigned long)arg1; \
3775 _argvec[2+2] = (unsigned long)arg2; \
3776 _argvec[2+3] = (unsigned long)arg3; \
3777 _argvec[2+4] = (unsigned long)arg4; \
3778 _argvec[2+5] = (unsigned long)arg5; \
3779 _argvec[2+6] = (unsigned long)arg6; \
3780 _argvec[2+7] = (unsigned long)arg7; \
3781 _argvec[2+8] = (unsigned long)arg8; \
3782 _argvec[2+9] = (unsigned long)arg9; \
3783 _argvec[2+10] = (unsigned long)arg10; \
3784 _argvec[2+11] = (unsigned long)arg11; \
3785 _argvec[2+12] = (unsigned long)arg12; \
3787 VALGRIND_ALIGN_STACK \
3789 "std 2,-16(12)\n\t" /* save tocptr */ \
3790 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3791 "addi 1,1,-144\n\t" /* expand stack frame */ \
3794 "std 3,120(1)\n\t" \
3797 "std 3,112(1)\n\t" \
3800 "std 3,104(1)\n\t" \
3805 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3806 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3807 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3808 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3809 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3810 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3811 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3812 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3813 "ld 12, 0(12)\n\t" /* target->r12 */ \
3814 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3817 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3818 VALGRIND_RESTORE_STACK \
3819 : /*out*/ "=r" (_res) \
3820 : /*in*/ "r" (&_argvec[2]) \
3821 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3823 lval = (__typeof__(lval)) _res; \
3826 #endif /* PLAT_ppc64le_linux */
3828 /* ------------------------- arm-linux ------------------------- */
3830 #if defined(PLAT_arm_linux)
3832 /* These regs are trashed by the hidden call. */
3833 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
/* Macros to save and align the stack before making a function
   call and to restore it afterwards, as gcc may not keep the stack
   pointer aligned if it doesn't realise that calls are being made
   to other functions. */

/* This is a bit tricky.  We store the original stack pointer in r10
   as it is callee-saved.  gcc doesn't allow the use of r11 for some
   reason.  Also, we can't directly "bic" the stack pointer in thumb
   mode since r13 isn't an allowed register number in that context.
   So use r4 as a temporary, since that is about to get trashed
   anyway, just after each use of this macro.  A side effect is that
   we need to be very careful about any future changes, since
   VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
#define VALGRIND_ALIGN_STACK \
      "mov r10, sp\n\t"      /* save the original SP in r10 (callee-saved) */ \
      "mov r4,  sp\n\t" \
      "bic r4,  r4, #7\n\t"  /* round down to an 8 byte boundary */ \
      "mov sp,  r4\n\t"
#define VALGRIND_RESTORE_STACK \
      "mov sp,  r10\n\t"     /* restore the original SP */
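
/* Purely illustrative sketch (compiled out, not used by the macros
   above): the "bic r4, r4, #7" step is just the usual
   round-down-to-an-8-byte-boundary masking.  The helper name below is
   made up for this sketch; in C terms the computation is: */
#if 0
static unsigned long example_align_down_8 ( unsigned long sp )
{
   /* e.g. sp == 0x7ffefff5UL yields 0x7ffefff0UL, which is 8-aligned */
   return sp & ~(unsigned long)7;
}
#endif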
/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
   long) == 4. */
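
/* Minimal usage sketch (compiled out): the CALL_FN_W_* macros below
   are normally used inside a function wrapper, together with OrigFn
   and VALGRIND_GET_ORIG_FN from elsewhere in this header.  The wrapped
   function "foo" and its two int arguments are made up for this
   example; the wrapper simply calls the original and returns its
   result. */
#if 0
int I_WRAP_SONAME_FNNAME_ZU(NONE, foo) ( int a, int b )
{
   int    result;
   OrigFn fn;
   VALGRIND_GET_ORIG_FN(fn);         /* fetch the original's address */
   CALL_FN_W_WW(result, fn, a, b);   /* call it with two word-sized args */
   return result;
}
#endif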
3859 #define CALL_FN_W_v(lval, orig) \
3861 volatile OrigFn _orig = (orig); \
3862 volatile unsigned long _argvec[1]; \
3863 volatile unsigned long _res; \
3864 _argvec[0] = (unsigned long)_orig.nraddr; \
3866 VALGRIND_ALIGN_STACK \
3867 "ldr r4, [%1] \n\t" /* target->r4 */ \
3868 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3869 VALGRIND_RESTORE_STACK \
3871 : /*out*/ "=r" (_res) \
3872 : /*in*/ "0" (&_argvec[0]) \
3873 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3875 lval = (__typeof__(lval)) _res; \
3878 #define CALL_FN_W_W(lval, orig, arg1) \
3880 volatile OrigFn _orig = (orig); \
3881 volatile unsigned long _argvec[2]; \
3882 volatile unsigned long _res; \
3883 _argvec[0] = (unsigned long)_orig.nraddr; \
3884 _argvec[1] = (unsigned long)(arg1); \
3886 VALGRIND_ALIGN_STACK \
3887 "ldr r0, [%1, #4] \n\t" \
3888 "ldr r4, [%1] \n\t" /* target->r4 */ \
3889 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3890 VALGRIND_RESTORE_STACK \
3892 : /*out*/ "=r" (_res) \
3893 : /*in*/ "0" (&_argvec[0]) \
3894 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3896 lval = (__typeof__(lval)) _res; \
3899 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3901 volatile OrigFn _orig = (orig); \
3902 volatile unsigned long _argvec[3]; \
3903 volatile unsigned long _res; \
3904 _argvec[0] = (unsigned long)_orig.nraddr; \
3905 _argvec[1] = (unsigned long)(arg1); \
3906 _argvec[2] = (unsigned long)(arg2); \
3908 VALGRIND_ALIGN_STACK \
3909 "ldr r0, [%1, #4] \n\t" \
3910 "ldr r1, [%1, #8] \n\t" \
3911 "ldr r4, [%1] \n\t" /* target->r4 */ \
3912 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3913 VALGRIND_RESTORE_STACK \
3915 : /*out*/ "=r" (_res) \
3916 : /*in*/ "0" (&_argvec[0]) \
3917 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3919 lval = (__typeof__(lval)) _res; \
3922 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3924 volatile OrigFn _orig = (orig); \
3925 volatile unsigned long _argvec[4]; \
3926 volatile unsigned long _res; \
3927 _argvec[0] = (unsigned long)_orig.nraddr; \
3928 _argvec[1] = (unsigned long)(arg1); \
3929 _argvec[2] = (unsigned long)(arg2); \
3930 _argvec[3] = (unsigned long)(arg3); \
3932 VALGRIND_ALIGN_STACK \
3933 "ldr r0, [%1, #4] \n\t" \
3934 "ldr r1, [%1, #8] \n\t" \
3935 "ldr r2, [%1, #12] \n\t" \
3936 "ldr r4, [%1] \n\t" /* target->r4 */ \
3937 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3938 VALGRIND_RESTORE_STACK \
3940 : /*out*/ "=r" (_res) \
3941 : /*in*/ "0" (&_argvec[0]) \
3942 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3944 lval = (__typeof__(lval)) _res; \
3947 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3949 volatile OrigFn _orig = (orig); \
3950 volatile unsigned long _argvec[5]; \
3951 volatile unsigned long _res; \
3952 _argvec[0] = (unsigned long)_orig.nraddr; \
3953 _argvec[1] = (unsigned long)(arg1); \
3954 _argvec[2] = (unsigned long)(arg2); \
3955 _argvec[3] = (unsigned long)(arg3); \
3956 _argvec[4] = (unsigned long)(arg4); \
3958 VALGRIND_ALIGN_STACK \
3959 "ldr r0, [%1, #4] \n\t" \
3960 "ldr r1, [%1, #8] \n\t" \
3961 "ldr r2, [%1, #12] \n\t" \
3962 "ldr r3, [%1, #16] \n\t" \
3963 "ldr r4, [%1] \n\t" /* target->r4 */ \
3964 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3965 VALGRIND_RESTORE_STACK \
3967 : /*out*/ "=r" (_res) \
3968 : /*in*/ "0" (&_argvec[0]) \
3969 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3971 lval = (__typeof__(lval)) _res; \
3974 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3976 volatile OrigFn _orig = (orig); \
3977 volatile unsigned long _argvec[6]; \
3978 volatile unsigned long _res; \
3979 _argvec[0] = (unsigned long)_orig.nraddr; \
3980 _argvec[1] = (unsigned long)(arg1); \
3981 _argvec[2] = (unsigned long)(arg2); \
3982 _argvec[3] = (unsigned long)(arg3); \
3983 _argvec[4] = (unsigned long)(arg4); \
3984 _argvec[5] = (unsigned long)(arg5); \
3986 VALGRIND_ALIGN_STACK \
3987 "sub sp, sp, #4 \n\t" \
3988 "ldr r0, [%1, #20] \n\t" \
3990 "ldr r0, [%1, #4] \n\t" \
3991 "ldr r1, [%1, #8] \n\t" \
3992 "ldr r2, [%1, #12] \n\t" \
3993 "ldr r3, [%1, #16] \n\t" \
3994 "ldr r4, [%1] \n\t" /* target->r4 */ \
3995 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3996 VALGRIND_RESTORE_STACK \
3998 : /*out*/ "=r" (_res) \
3999 : /*in*/ "0" (&_argvec[0]) \
4000 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4002 lval = (__typeof__(lval)) _res; \
4005 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4007 volatile OrigFn _orig = (orig); \
4008 volatile unsigned long _argvec[7]; \
4009 volatile unsigned long _res; \
4010 _argvec[0] = (unsigned long)_orig.nraddr; \
4011 _argvec[1] = (unsigned long)(arg1); \
4012 _argvec[2] = (unsigned long)(arg2); \
4013 _argvec[3] = (unsigned long)(arg3); \
4014 _argvec[4] = (unsigned long)(arg4); \
4015 _argvec[5] = (unsigned long)(arg5); \
4016 _argvec[6] = (unsigned long)(arg6); \
4018 VALGRIND_ALIGN_STACK \
4019 "ldr r0, [%1, #20] \n\t" \
4020 "ldr r1, [%1, #24] \n\t" \
4021 "push {r0, r1} \n\t" \
4022 "ldr r0, [%1, #4] \n\t" \
4023 "ldr r1, [%1, #8] \n\t" \
4024 "ldr r2, [%1, #12] \n\t" \
4025 "ldr r3, [%1, #16] \n\t" \
4026 "ldr r4, [%1] \n\t" /* target->r4 */ \
4027 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4028 VALGRIND_RESTORE_STACK \
4030 : /*out*/ "=r" (_res) \
4031 : /*in*/ "0" (&_argvec[0]) \
4032 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4034 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
4040 volatile OrigFn _orig = (orig); \
4041 volatile unsigned long _argvec[8]; \
4042 volatile unsigned long _res; \
4043 _argvec[0] = (unsigned long)_orig.nraddr; \
4044 _argvec[1] = (unsigned long)(arg1); \
4045 _argvec[2] = (unsigned long)(arg2); \
4046 _argvec[3] = (unsigned long)(arg3); \
4047 _argvec[4] = (unsigned long)(arg4); \
4048 _argvec[5] = (unsigned long)(arg5); \
4049 _argvec[6] = (unsigned long)(arg6); \
4050 _argvec[7] = (unsigned long)(arg7); \
4052 VALGRIND_ALIGN_STACK \
4053 "sub sp, sp, #4 \n\t" \
4054 "ldr r0, [%1, #20] \n\t" \
4055 "ldr r1, [%1, #24] \n\t" \
4056 "ldr r2, [%1, #28] \n\t" \
4057 "push {r0, r1, r2} \n\t" \
4058 "ldr r0, [%1, #4] \n\t" \
4059 "ldr r1, [%1, #8] \n\t" \
4060 "ldr r2, [%1, #12] \n\t" \
4061 "ldr r3, [%1, #16] \n\t" \
4062 "ldr r4, [%1] \n\t" /* target->r4 */ \
4063 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4064 VALGRIND_RESTORE_STACK \
4066 : /*out*/ "=r" (_res) \
4067 : /*in*/ "0" (&_argvec[0]) \
4068 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4070 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
4076 volatile OrigFn _orig = (orig); \
4077 volatile unsigned long _argvec[9]; \
4078 volatile unsigned long _res; \
4079 _argvec[0] = (unsigned long)_orig.nraddr; \
4080 _argvec[1] = (unsigned long)(arg1); \
4081 _argvec[2] = (unsigned long)(arg2); \
4082 _argvec[3] = (unsigned long)(arg3); \
4083 _argvec[4] = (unsigned long)(arg4); \
4084 _argvec[5] = (unsigned long)(arg5); \
4085 _argvec[6] = (unsigned long)(arg6); \
4086 _argvec[7] = (unsigned long)(arg7); \
4087 _argvec[8] = (unsigned long)(arg8); \
4089 VALGRIND_ALIGN_STACK \
4090 "ldr r0, [%1, #20] \n\t" \
4091 "ldr r1, [%1, #24] \n\t" \
4092 "ldr r2, [%1, #28] \n\t" \
4093 "ldr r3, [%1, #32] \n\t" \
4094 "push {r0, r1, r2, r3} \n\t" \
4095 "ldr r0, [%1, #4] \n\t" \
4096 "ldr r1, [%1, #8] \n\t" \
4097 "ldr r2, [%1, #12] \n\t" \
4098 "ldr r3, [%1, #16] \n\t" \
4099 "ldr r4, [%1] \n\t" /* target->r4 */ \
4100 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4101 VALGRIND_RESTORE_STACK \
4103 : /*out*/ "=r" (_res) \
4104 : /*in*/ "0" (&_argvec[0]) \
4105 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4107 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
4113 volatile OrigFn _orig = (orig); \
4114 volatile unsigned long _argvec[10]; \
4115 volatile unsigned long _res; \
4116 _argvec[0] = (unsigned long)_orig.nraddr; \
4117 _argvec[1] = (unsigned long)(arg1); \
4118 _argvec[2] = (unsigned long)(arg2); \
4119 _argvec[3] = (unsigned long)(arg3); \
4120 _argvec[4] = (unsigned long)(arg4); \
4121 _argvec[5] = (unsigned long)(arg5); \
4122 _argvec[6] = (unsigned long)(arg6); \
4123 _argvec[7] = (unsigned long)(arg7); \
4124 _argvec[8] = (unsigned long)(arg8); \
4125 _argvec[9] = (unsigned long)(arg9); \
4127 VALGRIND_ALIGN_STACK \
4128 "sub sp, sp, #4 \n\t" \
4129 "ldr r0, [%1, #20] \n\t" \
4130 "ldr r1, [%1, #24] \n\t" \
4131 "ldr r2, [%1, #28] \n\t" \
4132 "ldr r3, [%1, #32] \n\t" \
4133 "ldr r4, [%1, #36] \n\t" \
4134 "push {r0, r1, r2, r3, r4} \n\t" \
4135 "ldr r0, [%1, #4] \n\t" \
4136 "ldr r1, [%1, #8] \n\t" \
4137 "ldr r2, [%1, #12] \n\t" \
4138 "ldr r3, [%1, #16] \n\t" \
4139 "ldr r4, [%1] \n\t" /* target->r4 */ \
4140 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4141 VALGRIND_RESTORE_STACK \
4143 : /*out*/ "=r" (_res) \
4144 : /*in*/ "0" (&_argvec[0]) \
4145 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4147 lval = (__typeof__(lval)) _res; \
4150 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4151 arg7,arg8,arg9,arg10) \
4153 volatile OrigFn _orig = (orig); \
4154 volatile unsigned long _argvec[11]; \
4155 volatile unsigned long _res; \
4156 _argvec[0] = (unsigned long)_orig.nraddr; \
4157 _argvec[1] = (unsigned long)(arg1); \
4158 _argvec[2] = (unsigned long)(arg2); \
4159 _argvec[3] = (unsigned long)(arg3); \
4160 _argvec[4] = (unsigned long)(arg4); \
4161 _argvec[5] = (unsigned long)(arg5); \
4162 _argvec[6] = (unsigned long)(arg6); \
4163 _argvec[7] = (unsigned long)(arg7); \
4164 _argvec[8] = (unsigned long)(arg8); \
4165 _argvec[9] = (unsigned long)(arg9); \
4166 _argvec[10] = (unsigned long)(arg10); \
4168 VALGRIND_ALIGN_STACK \
4169 "ldr r0, [%1, #40] \n\t" \
4171 "ldr r0, [%1, #20] \n\t" \
4172 "ldr r1, [%1, #24] \n\t" \
4173 "ldr r2, [%1, #28] \n\t" \
4174 "ldr r3, [%1, #32] \n\t" \
4175 "ldr r4, [%1, #36] \n\t" \
4176 "push {r0, r1, r2, r3, r4} \n\t" \
4177 "ldr r0, [%1, #4] \n\t" \
4178 "ldr r1, [%1, #8] \n\t" \
4179 "ldr r2, [%1, #12] \n\t" \
4180 "ldr r3, [%1, #16] \n\t" \
4181 "ldr r4, [%1] \n\t" /* target->r4 */ \
4182 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4183 VALGRIND_RESTORE_STACK \
4185 : /*out*/ "=r" (_res) \
4186 : /*in*/ "0" (&_argvec[0]) \
4187 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4189 lval = (__typeof__(lval)) _res; \
4192 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                                 arg6,arg7,arg8,arg9,arg10,       \
                                 arg11)                           \
4196 volatile OrigFn _orig = (orig); \
4197 volatile unsigned long _argvec[12]; \
4198 volatile unsigned long _res; \
4199 _argvec[0] = (unsigned long)_orig.nraddr; \
4200 _argvec[1] = (unsigned long)(arg1); \
4201 _argvec[2] = (unsigned long)(arg2); \
4202 _argvec[3] = (unsigned long)(arg3); \
4203 _argvec[4] = (unsigned long)(arg4); \
4204 _argvec[5] = (unsigned long)(arg5); \
4205 _argvec[6] = (unsigned long)(arg6); \
4206 _argvec[7] = (unsigned long)(arg7); \
4207 _argvec[8] = (unsigned long)(arg8); \
4208 _argvec[9] = (unsigned long)(arg9); \
4209 _argvec[10] = (unsigned long)(arg10); \
4210 _argvec[11] = (unsigned long)(arg11); \
4212 VALGRIND_ALIGN_STACK \
4213 "sub sp, sp, #4 \n\t" \
4214 "ldr r0, [%1, #40] \n\t" \
4215 "ldr r1, [%1, #44] \n\t" \
4216 "push {r0, r1} \n\t" \
4217 "ldr r0, [%1, #20] \n\t" \
4218 "ldr r1, [%1, #24] \n\t" \
4219 "ldr r2, [%1, #28] \n\t" \
4220 "ldr r3, [%1, #32] \n\t" \
4221 "ldr r4, [%1, #36] \n\t" \
4222 "push {r0, r1, r2, r3, r4} \n\t" \
4223 "ldr r0, [%1, #4] \n\t" \
4224 "ldr r1, [%1, #8] \n\t" \
4225 "ldr r2, [%1, #12] \n\t" \
4226 "ldr r3, [%1, #16] \n\t" \
4227 "ldr r4, [%1] \n\t" /* target->r4 */ \
4228 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4229 VALGRIND_RESTORE_STACK \
4231 : /*out*/ "=r" (_res) \
4232 : /*in*/ "0" (&_argvec[0]) \
4233 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4235 lval = (__typeof__(lval)) _res; \
4238 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                                 arg6,arg7,arg8,arg9,arg10,       \
                                 arg11,arg12)                     \
4242 volatile OrigFn _orig = (orig); \
4243 volatile unsigned long _argvec[13]; \
4244 volatile unsigned long _res; \
4245 _argvec[0] = (unsigned long)_orig.nraddr; \
4246 _argvec[1] = (unsigned long)(arg1); \
4247 _argvec[2] = (unsigned long)(arg2); \
4248 _argvec[3] = (unsigned long)(arg3); \
4249 _argvec[4] = (unsigned long)(arg4); \
4250 _argvec[5] = (unsigned long)(arg5); \
4251 _argvec[6] = (unsigned long)(arg6); \
4252 _argvec[7] = (unsigned long)(arg7); \
4253 _argvec[8] = (unsigned long)(arg8); \
4254 _argvec[9] = (unsigned long)(arg9); \
4255 _argvec[10] = (unsigned long)(arg10); \
4256 _argvec[11] = (unsigned long)(arg11); \
4257 _argvec[12] = (unsigned long)(arg12); \
4259 VALGRIND_ALIGN_STACK \
4260 "ldr r0, [%1, #40] \n\t" \
4261 "ldr r1, [%1, #44] \n\t" \
4262 "ldr r2, [%1, #48] \n\t" \
4263 "push {r0, r1, r2} \n\t" \
4264 "ldr r0, [%1, #20] \n\t" \
4265 "ldr r1, [%1, #24] \n\t" \
4266 "ldr r2, [%1, #28] \n\t" \
4267 "ldr r3, [%1, #32] \n\t" \
4268 "ldr r4, [%1, #36] \n\t" \
4269 "push {r0, r1, r2, r3, r4} \n\t" \
4270 "ldr r0, [%1, #4] \n\t" \
4271 "ldr r1, [%1, #8] \n\t" \
4272 "ldr r2, [%1, #12] \n\t" \
4273 "ldr r3, [%1, #16] \n\t" \
4274 "ldr r4, [%1] \n\t" /* target->r4 */ \
4275 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4276 VALGRIND_RESTORE_STACK \
4278 : /*out*/ "=r" (_res) \
4279 : /*in*/ "0" (&_argvec[0]) \
4280 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4282 lval = (__typeof__(lval)) _res; \
4285 #endif /* PLAT_arm_linux */
4287 /* ------------------------ arm64-linux ------------------------ */
4289 #if defined(PLAT_arm64_linux) || defined(PLAT_arm64_freebsd)
4291 /* These regs are trashed by the hidden call. */
4292 #define __CALLER_SAVED_REGS \
4293 "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
4294 "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
4295 "x18", "x19", "x20", "x30", \
4296 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
4297 "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
4298 "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
4299 "v26", "v27", "v28", "v29", "v30", "v31"
/* x21 is callee-saved, so we can use it to save and restore SP around
   the hidden call. */
#define VALGRIND_ALIGN_STACK \
      "mov x21, sp\n\t"        /* save the original SP in x21 */ \
      "bic sp, x21, #15\n\t"   /* round down to a 16 byte boundary */
#define VALGRIND_RESTORE_STACK \
      "mov sp,  x21\n\t"       /* restore the original SP */
4309 /* These CALL_FN_ macros assume that on arm64-linux,
4310 sizeof(unsigned long) == 8. */
4312 #define CALL_FN_W_v(lval, orig) \
4314 volatile OrigFn _orig = (orig); \
4315 volatile unsigned long _argvec[1]; \
4316 volatile unsigned long _res; \
4317 _argvec[0] = (unsigned long)_orig.nraddr; \
4319 VALGRIND_ALIGN_STACK \
4320 "ldr x8, [%1] \n\t" /* target->x8 */ \
4321 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4322 VALGRIND_RESTORE_STACK \
4324 : /*out*/ "=r" (_res) \
4325 : /*in*/ "0" (&_argvec[0]) \
4326 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4328 lval = (__typeof__(lval)) _res; \
4331 #define CALL_FN_W_W(lval, orig, arg1) \
4333 volatile OrigFn _orig = (orig); \
4334 volatile unsigned long _argvec[2]; \
4335 volatile unsigned long _res; \
4336 _argvec[0] = (unsigned long)_orig.nraddr; \
4337 _argvec[1] = (unsigned long)(arg1); \
4339 VALGRIND_ALIGN_STACK \
4340 "ldr x0, [%1, #8] \n\t" \
4341 "ldr x8, [%1] \n\t" /* target->x8 */ \
4342 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4343 VALGRIND_RESTORE_STACK \
4345 : /*out*/ "=r" (_res) \
4346 : /*in*/ "0" (&_argvec[0]) \
4347 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4349 lval = (__typeof__(lval)) _res; \
4352 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4354 volatile OrigFn _orig = (orig); \
4355 volatile unsigned long _argvec[3]; \
4356 volatile unsigned long _res; \
4357 _argvec[0] = (unsigned long)_orig.nraddr; \
4358 _argvec[1] = (unsigned long)(arg1); \
4359 _argvec[2] = (unsigned long)(arg2); \
4361 VALGRIND_ALIGN_STACK \
4362 "ldr x0, [%1, #8] \n\t" \
4363 "ldr x1, [%1, #16] \n\t" \
4364 "ldr x8, [%1] \n\t" /* target->x8 */ \
4365 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4366 VALGRIND_RESTORE_STACK \
4368 : /*out*/ "=r" (_res) \
4369 : /*in*/ "0" (&_argvec[0]) \
4370 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4372 lval = (__typeof__(lval)) _res; \
4375 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4377 volatile OrigFn _orig = (orig); \
4378 volatile unsigned long _argvec[4]; \
4379 volatile unsigned long _res; \
4380 _argvec[0] = (unsigned long)_orig.nraddr; \
4381 _argvec[1] = (unsigned long)(arg1); \
4382 _argvec[2] = (unsigned long)(arg2); \
4383 _argvec[3] = (unsigned long)(arg3); \
4385 VALGRIND_ALIGN_STACK \
4386 "ldr x0, [%1, #8] \n\t" \
4387 "ldr x1, [%1, #16] \n\t" \
4388 "ldr x2, [%1, #24] \n\t" \
4389 "ldr x8, [%1] \n\t" /* target->x8 */ \
4390 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4391 VALGRIND_RESTORE_STACK \
4393 : /*out*/ "=r" (_res) \
4394 : /*in*/ "0" (&_argvec[0]) \
4395 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4397 lval = (__typeof__(lval)) _res; \
4400 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4402 volatile OrigFn _orig = (orig); \
4403 volatile unsigned long _argvec[5]; \
4404 volatile unsigned long _res; \
4405 _argvec[0] = (unsigned long)_orig.nraddr; \
4406 _argvec[1] = (unsigned long)(arg1); \
4407 _argvec[2] = (unsigned long)(arg2); \
4408 _argvec[3] = (unsigned long)(arg3); \
4409 _argvec[4] = (unsigned long)(arg4); \
4411 VALGRIND_ALIGN_STACK \
4412 "ldr x0, [%1, #8] \n\t" \
4413 "ldr x1, [%1, #16] \n\t" \
4414 "ldr x2, [%1, #24] \n\t" \
4415 "ldr x3, [%1, #32] \n\t" \
4416 "ldr x8, [%1] \n\t" /* target->x8 */ \
4417 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4418 VALGRIND_RESTORE_STACK \
4420 : /*out*/ "=r" (_res) \
4421 : /*in*/ "0" (&_argvec[0]) \
4422 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4424 lval = (__typeof__(lval)) _res; \
4427 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4429 volatile OrigFn _orig = (orig); \
4430 volatile unsigned long _argvec[6]; \
4431 volatile unsigned long _res; \
4432 _argvec[0] = (unsigned long)_orig.nraddr; \
4433 _argvec[1] = (unsigned long)(arg1); \
4434 _argvec[2] = (unsigned long)(arg2); \
4435 _argvec[3] = (unsigned long)(arg3); \
4436 _argvec[4] = (unsigned long)(arg4); \
4437 _argvec[5] = (unsigned long)(arg5); \
4439 VALGRIND_ALIGN_STACK \
4440 "ldr x0, [%1, #8] \n\t" \
4441 "ldr x1, [%1, #16] \n\t" \
4442 "ldr x2, [%1, #24] \n\t" \
4443 "ldr x3, [%1, #32] \n\t" \
4444 "ldr x4, [%1, #40] \n\t" \
4445 "ldr x8, [%1] \n\t" /* target->x8 */ \
4446 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4447 VALGRIND_RESTORE_STACK \
4449 : /*out*/ "=r" (_res) \
4450 : /*in*/ "0" (&_argvec[0]) \
4451 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4453 lval = (__typeof__(lval)) _res; \
4456 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4458 volatile OrigFn _orig = (orig); \
4459 volatile unsigned long _argvec[7]; \
4460 volatile unsigned long _res; \
4461 _argvec[0] = (unsigned long)_orig.nraddr; \
4462 _argvec[1] = (unsigned long)(arg1); \
4463 _argvec[2] = (unsigned long)(arg2); \
4464 _argvec[3] = (unsigned long)(arg3); \
4465 _argvec[4] = (unsigned long)(arg4); \
4466 _argvec[5] = (unsigned long)(arg5); \
4467 _argvec[6] = (unsigned long)(arg6); \
4469 VALGRIND_ALIGN_STACK \
4470 "ldr x0, [%1, #8] \n\t" \
4471 "ldr x1, [%1, #16] \n\t" \
4472 "ldr x2, [%1, #24] \n\t" \
4473 "ldr x3, [%1, #32] \n\t" \
4474 "ldr x4, [%1, #40] \n\t" \
4475 "ldr x5, [%1, #48] \n\t" \
4476 "ldr x8, [%1] \n\t" /* target->x8 */ \
4477 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4478 VALGRIND_RESTORE_STACK \
4480 : /*out*/ "=r" (_res) \
4481 : /*in*/ "0" (&_argvec[0]) \
4482 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4484 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
4490 volatile OrigFn _orig = (orig); \
4491 volatile unsigned long _argvec[8]; \
4492 volatile unsigned long _res; \
4493 _argvec[0] = (unsigned long)_orig.nraddr; \
4494 _argvec[1] = (unsigned long)(arg1); \
4495 _argvec[2] = (unsigned long)(arg2); \
4496 _argvec[3] = (unsigned long)(arg3); \
4497 _argvec[4] = (unsigned long)(arg4); \
4498 _argvec[5] = (unsigned long)(arg5); \
4499 _argvec[6] = (unsigned long)(arg6); \
4500 _argvec[7] = (unsigned long)(arg7); \
4502 VALGRIND_ALIGN_STACK \
4503 "ldr x0, [%1, #8] \n\t" \
4504 "ldr x1, [%1, #16] \n\t" \
4505 "ldr x2, [%1, #24] \n\t" \
4506 "ldr x3, [%1, #32] \n\t" \
4507 "ldr x4, [%1, #40] \n\t" \
4508 "ldr x5, [%1, #48] \n\t" \
4509 "ldr x6, [%1, #56] \n\t" \
4510 "ldr x8, [%1] \n\t" /* target->x8 */ \
4511 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4512 VALGRIND_RESTORE_STACK \
4514 : /*out*/ "=r" (_res) \
4515 : /*in*/ "0" (&_argvec[0]) \
4516 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4518 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
4524 volatile OrigFn _orig = (orig); \
4525 volatile unsigned long _argvec[9]; \
4526 volatile unsigned long _res; \
4527 _argvec[0] = (unsigned long)_orig.nraddr; \
4528 _argvec[1] = (unsigned long)(arg1); \
4529 _argvec[2] = (unsigned long)(arg2); \
4530 _argvec[3] = (unsigned long)(arg3); \
4531 _argvec[4] = (unsigned long)(arg4); \
4532 _argvec[5] = (unsigned long)(arg5); \
4533 _argvec[6] = (unsigned long)(arg6); \
4534 _argvec[7] = (unsigned long)(arg7); \
4535 _argvec[8] = (unsigned long)(arg8); \
4537 VALGRIND_ALIGN_STACK \
4538 "ldr x0, [%1, #8] \n\t" \
4539 "ldr x1, [%1, #16] \n\t" \
4540 "ldr x2, [%1, #24] \n\t" \
4541 "ldr x3, [%1, #32] \n\t" \
4542 "ldr x4, [%1, #40] \n\t" \
4543 "ldr x5, [%1, #48] \n\t" \
4544 "ldr x6, [%1, #56] \n\t" \
4545 "ldr x7, [%1, #64] \n\t" \
4546 "ldr x8, [%1] \n\t" /* target->x8 */ \
4547 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4548 VALGRIND_RESTORE_STACK \
4550 : /*out*/ "=r" (_res) \
4551 : /*in*/ "0" (&_argvec[0]) \
4552 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4554 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
4560 volatile OrigFn _orig = (orig); \
4561 volatile unsigned long _argvec[10]; \
4562 volatile unsigned long _res; \
4563 _argvec[0] = (unsigned long)_orig.nraddr; \
4564 _argvec[1] = (unsigned long)(arg1); \
4565 _argvec[2] = (unsigned long)(arg2); \
4566 _argvec[3] = (unsigned long)(arg3); \
4567 _argvec[4] = (unsigned long)(arg4); \
4568 _argvec[5] = (unsigned long)(arg5); \
4569 _argvec[6] = (unsigned long)(arg6); \
4570 _argvec[7] = (unsigned long)(arg7); \
4571 _argvec[8] = (unsigned long)(arg8); \
4572 _argvec[9] = (unsigned long)(arg9); \
4574 VALGRIND_ALIGN_STACK \
4575 "sub sp, sp, #0x20 \n\t" \
4576 "ldr x0, [%1, #8] \n\t" \
4577 "ldr x1, [%1, #16] \n\t" \
4578 "ldr x2, [%1, #24] \n\t" \
4579 "ldr x3, [%1, #32] \n\t" \
4580 "ldr x4, [%1, #40] \n\t" \
4581 "ldr x5, [%1, #48] \n\t" \
4582 "ldr x6, [%1, #56] \n\t" \
4583 "ldr x7, [%1, #64] \n\t" \
4584 "ldr x8, [%1, #72] \n\t" \
4585 "str x8, [sp, #0] \n\t" \
4586 "ldr x8, [%1] \n\t" /* target->x8 */ \
4587 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4588 VALGRIND_RESTORE_STACK \
4590 : /*out*/ "=r" (_res) \
4591 : /*in*/ "0" (&_argvec[0]) \
4592 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4594 lval = (__typeof__(lval)) _res; \
4597 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4598 arg7,arg8,arg9,arg10) \
4600 volatile OrigFn _orig = (orig); \
4601 volatile unsigned long _argvec[11]; \
4602 volatile unsigned long _res; \
4603 _argvec[0] = (unsigned long)_orig.nraddr; \
4604 _argvec[1] = (unsigned long)(arg1); \
4605 _argvec[2] = (unsigned long)(arg2); \
4606 _argvec[3] = (unsigned long)(arg3); \
4607 _argvec[4] = (unsigned long)(arg4); \
4608 _argvec[5] = (unsigned long)(arg5); \
4609 _argvec[6] = (unsigned long)(arg6); \
4610 _argvec[7] = (unsigned long)(arg7); \
4611 _argvec[8] = (unsigned long)(arg8); \
4612 _argvec[9] = (unsigned long)(arg9); \
4613 _argvec[10] = (unsigned long)(arg10); \
4615 VALGRIND_ALIGN_STACK \
4616 "sub sp, sp, #0x20 \n\t" \
4617 "ldr x0, [%1, #8] \n\t" \
4618 "ldr x1, [%1, #16] \n\t" \
4619 "ldr x2, [%1, #24] \n\t" \
4620 "ldr x3, [%1, #32] \n\t" \
4621 "ldr x4, [%1, #40] \n\t" \
4622 "ldr x5, [%1, #48] \n\t" \
4623 "ldr x6, [%1, #56] \n\t" \
4624 "ldr x7, [%1, #64] \n\t" \
4625 "ldr x8, [%1, #72] \n\t" \
4626 "str x8, [sp, #0] \n\t" \
4627 "ldr x8, [%1, #80] \n\t" \
4628 "str x8, [sp, #8] \n\t" \
4629 "ldr x8, [%1] \n\t" /* target->x8 */ \
4630 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4631 VALGRIND_RESTORE_STACK \
4633 : /*out*/ "=r" (_res) \
4634 : /*in*/ "0" (&_argvec[0]) \
4635 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4637 lval = (__typeof__(lval)) _res; \
4640 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4641 arg7,arg8,arg9,arg10,arg11) \
4643 volatile OrigFn _orig = (orig); \
4644 volatile unsigned long _argvec[12]; \
4645 volatile unsigned long _res; \
4646 _argvec[0] = (unsigned long)_orig.nraddr; \
4647 _argvec[1] = (unsigned long)(arg1); \
4648 _argvec[2] = (unsigned long)(arg2); \
4649 _argvec[3] = (unsigned long)(arg3); \
4650 _argvec[4] = (unsigned long)(arg4); \
4651 _argvec[5] = (unsigned long)(arg5); \
4652 _argvec[6] = (unsigned long)(arg6); \
4653 _argvec[7] = (unsigned long)(arg7); \
4654 _argvec[8] = (unsigned long)(arg8); \
4655 _argvec[9] = (unsigned long)(arg9); \
4656 _argvec[10] = (unsigned long)(arg10); \
4657 _argvec[11] = (unsigned long)(arg11); \
4659 VALGRIND_ALIGN_STACK \
4660 "sub sp, sp, #0x30 \n\t" \
4661 "ldr x0, [%1, #8] \n\t" \
4662 "ldr x1, [%1, #16] \n\t" \
4663 "ldr x2, [%1, #24] \n\t" \
4664 "ldr x3, [%1, #32] \n\t" \
4665 "ldr x4, [%1, #40] \n\t" \
4666 "ldr x5, [%1, #48] \n\t" \
4667 "ldr x6, [%1, #56] \n\t" \
4668 "ldr x7, [%1, #64] \n\t" \
4669 "ldr x8, [%1, #72] \n\t" \
4670 "str x8, [sp, #0] \n\t" \
4671 "ldr x8, [%1, #80] \n\t" \
4672 "str x8, [sp, #8] \n\t" \
4673 "ldr x8, [%1, #88] \n\t" \
4674 "str x8, [sp, #16] \n\t" \
4675 "ldr x8, [%1] \n\t" /* target->x8 */ \
4676 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4677 VALGRIND_RESTORE_STACK \
4679 : /*out*/ "=r" (_res) \
4680 : /*in*/ "0" (&_argvec[0]) \
4681 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4683 lval = (__typeof__(lval)) _res; \
4686 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                 arg7,arg8,arg9,arg10,arg11,      \
                                 arg12)                           \
4690 volatile OrigFn _orig = (orig); \
4691 volatile unsigned long _argvec[13]; \
4692 volatile unsigned long _res; \
4693 _argvec[0] = (unsigned long)_orig.nraddr; \
4694 _argvec[1] = (unsigned long)(arg1); \
4695 _argvec[2] = (unsigned long)(arg2); \
4696 _argvec[3] = (unsigned long)(arg3); \
4697 _argvec[4] = (unsigned long)(arg4); \
4698 _argvec[5] = (unsigned long)(arg5); \
4699 _argvec[6] = (unsigned long)(arg6); \
4700 _argvec[7] = (unsigned long)(arg7); \
4701 _argvec[8] = (unsigned long)(arg8); \
4702 _argvec[9] = (unsigned long)(arg9); \
4703 _argvec[10] = (unsigned long)(arg10); \
4704 _argvec[11] = (unsigned long)(arg11); \
4705 _argvec[12] = (unsigned long)(arg12); \
4707 VALGRIND_ALIGN_STACK \
4708 "sub sp, sp, #0x30 \n\t" \
4709 "ldr x0, [%1, #8] \n\t" \
4710 "ldr x1, [%1, #16] \n\t" \
4711 "ldr x2, [%1, #24] \n\t" \
4712 "ldr x3, [%1, #32] \n\t" \
4713 "ldr x4, [%1, #40] \n\t" \
4714 "ldr x5, [%1, #48] \n\t" \
4715 "ldr x6, [%1, #56] \n\t" \
4716 "ldr x7, [%1, #64] \n\t" \
4717 "ldr x8, [%1, #72] \n\t" \
4718 "str x8, [sp, #0] \n\t" \
4719 "ldr x8, [%1, #80] \n\t" \
4720 "str x8, [sp, #8] \n\t" \
4721 "ldr x8, [%1, #88] \n\t" \
4722 "str x8, [sp, #16] \n\t" \
4723 "ldr x8, [%1, #96] \n\t" \
4724 "str x8, [sp, #24] \n\t" \
4725 "ldr x8, [%1] \n\t" /* target->x8 */ \
4726 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4727 VALGRIND_RESTORE_STACK \
4729 : /*out*/ "=r" (_res) \
4730 : /*in*/ "0" (&_argvec[0]) \
4731 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4733 lval = (__typeof__(lval)) _res; \
#endif /* PLAT_arm64_linux || PLAT_arm64_freebsd */
4738 /* ------------------------- s390x-linux ------------------------- */
4740 #if defined(PLAT_s390x_linux)
/* Similar workaround to the amd64 one (see above), but we use r11 as
   the frame pointer and save the old r11 in r7.  r11 might be used for
   argvec, therefore we copy argvec into r1, since r1 is clobbered
   after the call anyway. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
# define __FRAME_POINTER                                         \
      ,"d"(__builtin_dwarf_cfa())
# define VALGRIND_CFI_PROLOGUE                                   \
      ".cfi_remember_state\n\t"                                  \
      "lgr 1,%1\n\t"   /* copy the argvec pointer into r1 */     \
      "lgr 7,11\n\t"   /* save the old r11 in r7 */              \
      "lgr 11,%2\n\t"  /* set up r11 as the frame pointer */     \
      ".cfi_def_cfa 11, 0\n\t"
# define VALGRIND_CFI_EPILOGUE                                   \
      "lgr 11, 7\n\t"  /* restore the old r11 */                 \
      ".cfi_restore_state\n\t"
#else
# define __FRAME_POINTER
# define VALGRIND_CFI_PROLOGUE                                   \
      "lgr 1,%1\n\t"
# define VALGRIND_CFI_EPILOGUE
#endif
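
/* Illustrative skeleton (compiled out) of how the CALL_FN_W_* bodies
   below bind their asm operands, which is what the "%1" and "%2"
   references in VALGRIND_CFI_PROLOGUE rely on: %0 is the result
   operand, %1 is the argument vector, and %2 is the frame address
   supplied by __FRAME_POINTER.  _res and _argvec stand for the locals
   each macro expansion declares. */
#if 0
   __asm__ volatile(
      VALGRIND_CFI_PROLOGUE
      "aghi 15,-160\n\t"            /* carve out the register save area */
      "lg 1, 0(1)\n\t"              /* target address -> r1 */
      VALGRIND_CALL_NOREDIR_R1
      "aghi 15,160\n\t"             /* drop the save area again */
      VALGRIND_CFI_EPILOGUE
      "lgr %0, 2\n\t"               /* function result (r2) -> _res */
      : /*out*/   "=d" (_res)
      : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER
      : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "7"
   );
#endif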
4765 /* Nb: On s390 the stack pointer is properly aligned *at all times*
4766 according to the s390 GCC maintainer. (The ABI specification is not
4767 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4768 VALGRIND_RESTORE_STACK are not defined here. */
4770 /* These regs are trashed by the hidden call. Note that we overwrite
4771 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
   function a proper return address. All others are ABI defined call
   clobbers. */
4774 #if defined(__VX__) || defined(__S390_VX__)
4775 #define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
4776 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", \
4777 "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", \
4778 "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", \
4779 "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
#else
#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
      "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
#endif
/* Nb: Although r11 is modified in the asm snippets below (inside
   VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
   two reasons:
   (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
       modified
   (2) GCC will complain that r11 cannot appear inside a clobber section,
       when compiled with -O -fno-omit-frame-pointer
*/
4794 #define CALL_FN_W_v(lval, orig) \
4796 volatile OrigFn _orig = (orig); \
4797 volatile unsigned long _argvec[1]; \
4798 volatile unsigned long _res; \
4799 _argvec[0] = (unsigned long)_orig.nraddr; \
4801 VALGRIND_CFI_PROLOGUE \
4802 "aghi 15,-160\n\t" \
4803 "lg 1, 0(1)\n\t" /* target->r1 */ \
4804 VALGRIND_CALL_NOREDIR_R1 \
4806 VALGRIND_CFI_EPILOGUE \
4808 : /*out*/ "=d" (_res) \
4809 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
4810 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4812 lval = (__typeof__(lval)) _res; \
/* The calling ABI has the arguments in r2-r6 and on the stack. */
4816 #define CALL_FN_W_W(lval, orig, arg1) \
4818 volatile OrigFn _orig = (orig); \
4819 volatile unsigned long _argvec[2]; \
4820 volatile unsigned long _res; \
4821 _argvec[0] = (unsigned long)_orig.nraddr; \
4822 _argvec[1] = (unsigned long)arg1; \
4824 VALGRIND_CFI_PROLOGUE \
4825 "aghi 15,-160\n\t" \
4828 VALGRIND_CALL_NOREDIR_R1 \
4830 VALGRIND_CFI_EPILOGUE \
4832 : /*out*/ "=d" (_res) \
4833 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4834 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4836 lval = (__typeof__(lval)) _res; \
4839 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
4841 volatile OrigFn _orig = (orig); \
4842 volatile unsigned long _argvec[3]; \
4843 volatile unsigned long _res; \
4844 _argvec[0] = (unsigned long)_orig.nraddr; \
4845 _argvec[1] = (unsigned long)arg1; \
4846 _argvec[2] = (unsigned long)arg2; \
4848 VALGRIND_CFI_PROLOGUE \
4849 "aghi 15,-160\n\t" \
4853 VALGRIND_CALL_NOREDIR_R1 \
4855 VALGRIND_CFI_EPILOGUE \
4857 : /*out*/ "=d" (_res) \
4858 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4859 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4861 lval = (__typeof__(lval)) _res; \
4864 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
4866 volatile OrigFn _orig = (orig); \
4867 volatile unsigned long _argvec[4]; \
4868 volatile unsigned long _res; \
4869 _argvec[0] = (unsigned long)_orig.nraddr; \
4870 _argvec[1] = (unsigned long)arg1; \
4871 _argvec[2] = (unsigned long)arg2; \
4872 _argvec[3] = (unsigned long)arg3; \
4874 VALGRIND_CFI_PROLOGUE \
4875 "aghi 15,-160\n\t" \
4880 VALGRIND_CALL_NOREDIR_R1 \
4882 VALGRIND_CFI_EPILOGUE \
4884 : /*out*/ "=d" (_res) \
4885 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4886 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4888 lval = (__typeof__(lval)) _res; \
4891 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
4893 volatile OrigFn _orig = (orig); \
4894 volatile unsigned long _argvec[5]; \
4895 volatile unsigned long _res; \
4896 _argvec[0] = (unsigned long)_orig.nraddr; \
4897 _argvec[1] = (unsigned long)arg1; \
4898 _argvec[2] = (unsigned long)arg2; \
4899 _argvec[3] = (unsigned long)arg3; \
4900 _argvec[4] = (unsigned long)arg4; \
4902 VALGRIND_CFI_PROLOGUE \
4903 "aghi 15,-160\n\t" \
4909 VALGRIND_CALL_NOREDIR_R1 \
4911 VALGRIND_CFI_EPILOGUE \
4913 : /*out*/ "=d" (_res) \
4914 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4915 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4917 lval = (__typeof__(lval)) _res; \
4920 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
4922 volatile OrigFn _orig = (orig); \
4923 volatile unsigned long _argvec[6]; \
4924 volatile unsigned long _res; \
4925 _argvec[0] = (unsigned long)_orig.nraddr; \
4926 _argvec[1] = (unsigned long)arg1; \
4927 _argvec[2] = (unsigned long)arg2; \
4928 _argvec[3] = (unsigned long)arg3; \
4929 _argvec[4] = (unsigned long)arg4; \
4930 _argvec[5] = (unsigned long)arg5; \
4932 VALGRIND_CFI_PROLOGUE \
4933 "aghi 15,-160\n\t" \
4940 VALGRIND_CALL_NOREDIR_R1 \
4942 VALGRIND_CFI_EPILOGUE \
4944 : /*out*/ "=d" (_res) \
4945 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4946 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4948 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6)                                       \
4954 volatile OrigFn _orig = (orig); \
4955 volatile unsigned long _argvec[7]; \
4956 volatile unsigned long _res; \
4957 _argvec[0] = (unsigned long)_orig.nraddr; \
4958 _argvec[1] = (unsigned long)arg1; \
4959 _argvec[2] = (unsigned long)arg2; \
4960 _argvec[3] = (unsigned long)arg3; \
4961 _argvec[4] = (unsigned long)arg4; \
4962 _argvec[5] = (unsigned long)arg5; \
4963 _argvec[6] = (unsigned long)arg6; \
4965 VALGRIND_CFI_PROLOGUE \
4966 "aghi 15,-168\n\t" \
4972 "mvc 160(8,15), 48(1)\n\t" \
4974 VALGRIND_CALL_NOREDIR_R1 \
4976 VALGRIND_CFI_EPILOGUE \
4978 : /*out*/ "=d" (_res) \
4979 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4980 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4982 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7)                                 \
4988 volatile OrigFn _orig = (orig); \
4989 volatile unsigned long _argvec[8]; \
4990 volatile unsigned long _res; \
4991 _argvec[0] = (unsigned long)_orig.nraddr; \
4992 _argvec[1] = (unsigned long)arg1; \
4993 _argvec[2] = (unsigned long)arg2; \
4994 _argvec[3] = (unsigned long)arg3; \
4995 _argvec[4] = (unsigned long)arg4; \
4996 _argvec[5] = (unsigned long)arg5; \
4997 _argvec[6] = (unsigned long)arg6; \
4998 _argvec[7] = (unsigned long)arg7; \
5000 VALGRIND_CFI_PROLOGUE \
5001 "aghi 15,-176\n\t" \
5007 "mvc 160(8,15), 48(1)\n\t" \
5008 "mvc 168(8,15), 56(1)\n\t" \
5010 VALGRIND_CALL_NOREDIR_R1 \
5012 VALGRIND_CFI_EPILOGUE \
5014 : /*out*/ "=d" (_res) \
5015 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5016 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5018 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7, arg8)                           \
5024 volatile OrigFn _orig = (orig); \
5025 volatile unsigned long _argvec[9]; \
5026 volatile unsigned long _res; \
5027 _argvec[0] = (unsigned long)_orig.nraddr; \
5028 _argvec[1] = (unsigned long)arg1; \
5029 _argvec[2] = (unsigned long)arg2; \
5030 _argvec[3] = (unsigned long)arg3; \
5031 _argvec[4] = (unsigned long)arg4; \
5032 _argvec[5] = (unsigned long)arg5; \
5033 _argvec[6] = (unsigned long)arg6; \
5034 _argvec[7] = (unsigned long)arg7; \
5035 _argvec[8] = (unsigned long)arg8; \
5037 VALGRIND_CFI_PROLOGUE \
5038 "aghi 15,-184\n\t" \
5044 "mvc 160(8,15), 48(1)\n\t" \
5045 "mvc 168(8,15), 56(1)\n\t" \
5046 "mvc 176(8,15), 64(1)\n\t" \
5048 VALGRIND_CALL_NOREDIR_R1 \
5050 VALGRIND_CFI_EPILOGUE \
5052 : /*out*/ "=d" (_res) \
5053 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5054 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5056 lval = (__typeof__(lval)) _res; \
5059 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5060 arg6, arg7 ,arg8, arg9) \
5062 volatile OrigFn _orig = (orig); \
5063 volatile unsigned long _argvec[10]; \
5064 volatile unsigned long _res; \
5065 _argvec[0] = (unsigned long)_orig.nraddr; \
5066 _argvec[1] = (unsigned long)arg1; \
5067 _argvec[2] = (unsigned long)arg2; \
5068 _argvec[3] = (unsigned long)arg3; \
5069 _argvec[4] = (unsigned long)arg4; \
5070 _argvec[5] = (unsigned long)arg5; \
5071 _argvec[6] = (unsigned long)arg6; \
5072 _argvec[7] = (unsigned long)arg7; \
5073 _argvec[8] = (unsigned long)arg8; \
5074 _argvec[9] = (unsigned long)arg9; \
5076 VALGRIND_CFI_PROLOGUE \
5077 "aghi 15,-192\n\t" \
5083 "mvc 160(8,15), 48(1)\n\t" \
5084 "mvc 168(8,15), 56(1)\n\t" \
5085 "mvc 176(8,15), 64(1)\n\t" \
5086 "mvc 184(8,15), 72(1)\n\t" \
5088 VALGRIND_CALL_NOREDIR_R1 \
5090 VALGRIND_CFI_EPILOGUE \
5092 : /*out*/ "=d" (_res) \
5093 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5094 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5096 lval = (__typeof__(lval)) _res; \
5099 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5100 arg6, arg7 ,arg8, arg9, arg10) \
5102 volatile OrigFn _orig = (orig); \
5103 volatile unsigned long _argvec[11]; \
5104 volatile unsigned long _res; \
5105 _argvec[0] = (unsigned long)_orig.nraddr; \
5106 _argvec[1] = (unsigned long)arg1; \
5107 _argvec[2] = (unsigned long)arg2; \
5108 _argvec[3] = (unsigned long)arg3; \
5109 _argvec[4] = (unsigned long)arg4; \
5110 _argvec[5] = (unsigned long)arg5; \
5111 _argvec[6] = (unsigned long)arg6; \
5112 _argvec[7] = (unsigned long)arg7; \
5113 _argvec[8] = (unsigned long)arg8; \
5114 _argvec[9] = (unsigned long)arg9; \
5115 _argvec[10] = (unsigned long)arg10; \
5117 VALGRIND_CFI_PROLOGUE \
5118 "aghi 15,-200\n\t" \
5124 "mvc 160(8,15), 48(1)\n\t" \
5125 "mvc 168(8,15), 56(1)\n\t" \
5126 "mvc 176(8,15), 64(1)\n\t" \
5127 "mvc 184(8,15), 72(1)\n\t" \
5128 "mvc 192(8,15), 80(1)\n\t" \
5130 VALGRIND_CALL_NOREDIR_R1 \
5132 VALGRIND_CFI_EPILOGUE \
5134 : /*out*/ "=d" (_res) \
5135 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5136 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5138 lval = (__typeof__(lval)) _res; \
5141 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5142 arg6, arg7 ,arg8, arg9, arg10, arg11) \
5144 volatile OrigFn _orig = (orig); \
5145 volatile unsigned long _argvec[12]; \
5146 volatile unsigned long _res; \
5147 _argvec[0] = (unsigned long)_orig.nraddr; \
5148 _argvec[1] = (unsigned long)arg1; \
5149 _argvec[2] = (unsigned long)arg2; \
5150 _argvec[3] = (unsigned long)arg3; \
5151 _argvec[4] = (unsigned long)arg4; \
5152 _argvec[5] = (unsigned long)arg5; \
5153 _argvec[6] = (unsigned long)arg6; \
5154 _argvec[7] = (unsigned long)arg7; \
5155 _argvec[8] = (unsigned long)arg8; \
5156 _argvec[9] = (unsigned long)arg9; \
5157 _argvec[10] = (unsigned long)arg10; \
5158 _argvec[11] = (unsigned long)arg11; \
5160 VALGRIND_CFI_PROLOGUE \
5161 "aghi 15,-208\n\t" \
5167 "mvc 160(8,15), 48(1)\n\t" \
5168 "mvc 168(8,15), 56(1)\n\t" \
5169 "mvc 176(8,15), 64(1)\n\t" \
5170 "mvc 184(8,15), 72(1)\n\t" \
5171 "mvc 192(8,15), 80(1)\n\t" \
5172 "mvc 200(8,15), 88(1)\n\t" \
5174 VALGRIND_CALL_NOREDIR_R1 \
5176 VALGRIND_CFI_EPILOGUE \
5178 : /*out*/ "=d" (_res) \
5179 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5180 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5182 lval = (__typeof__(lval)) _res; \
5185 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5186 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
5188 volatile OrigFn _orig = (orig); \
5189 volatile unsigned long _argvec[13]; \
5190 volatile unsigned long _res; \
5191 _argvec[0] = (unsigned long)_orig.nraddr; \
5192 _argvec[1] = (unsigned long)arg1; \
5193 _argvec[2] = (unsigned long)arg2; \
5194 _argvec[3] = (unsigned long)arg3; \
5195 _argvec[4] = (unsigned long)arg4; \
5196 _argvec[5] = (unsigned long)arg5; \
5197 _argvec[6] = (unsigned long)arg6; \
5198 _argvec[7] = (unsigned long)arg7; \
5199 _argvec[8] = (unsigned long)arg8; \
5200 _argvec[9] = (unsigned long)arg9; \
5201 _argvec[10] = (unsigned long)arg10; \
5202 _argvec[11] = (unsigned long)arg11; \
5203 _argvec[12] = (unsigned long)arg12; \
5205 VALGRIND_CFI_PROLOGUE \
5206 "aghi 15,-216\n\t" \
5212 "mvc 160(8,15), 48(1)\n\t" \
5213 "mvc 168(8,15), 56(1)\n\t" \
5214 "mvc 176(8,15), 64(1)\n\t" \
5215 "mvc 184(8,15), 72(1)\n\t" \
5216 "mvc 192(8,15), 80(1)\n\t" \
5217 "mvc 200(8,15), 88(1)\n\t" \
5218 "mvc 208(8,15), 96(1)\n\t" \
5220 VALGRIND_CALL_NOREDIR_R1 \
5222 VALGRIND_CFI_EPILOGUE \
5224 : /*out*/ "=d" (_res) \
5225 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5226 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5228 lval = (__typeof__(lval)) _res; \
5232 #endif /* PLAT_s390x_linux */
5234 /* ------------------------- mips32-linux ----------------------- */
5236 #if defined(PLAT_mips32_linux)
5238 /* These regs are trashed by the hidden call. */
5239 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5240 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
   long) == 4. */
5246 #define CALL_FN_W_v(lval, orig) \
5248 volatile OrigFn _orig = (orig); \
5249 volatile unsigned long _argvec[1]; \
5250 volatile unsigned long _res; \
5251 _argvec[0] = (unsigned long)_orig.nraddr; \
5253 "subu $29, $29, 8 \n\t" \
5254 "sw $28, 0($29) \n\t" \
5255 "sw $31, 4($29) \n\t" \
5256 "subu $29, $29, 16 \n\t" \
5257 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5258 VALGRIND_CALL_NOREDIR_T9 \
5259 "addu $29, $29, 16\n\t" \
5260 "lw $28, 0($29) \n\t" \
5261 "lw $31, 4($29) \n\t" \
5262 "addu $29, $29, 8 \n\t" \
5264 : /*out*/ "=r" (_res) \
5265 : /*in*/ "0" (&_argvec[0]) \
5266 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5268 lval = (__typeof__(lval)) _res; \
5271 #define CALL_FN_W_W(lval, orig, arg1) \
5273 volatile OrigFn _orig = (orig); \
5274 volatile unsigned long _argvec[2]; \
5275 volatile unsigned long _res; \
5276 _argvec[0] = (unsigned long)_orig.nraddr; \
5277 _argvec[1] = (unsigned long)(arg1); \
5279 "subu $29, $29, 8 \n\t" \
5280 "sw $28, 0($29) \n\t" \
5281 "sw $31, 4($29) \n\t" \
5282 "subu $29, $29, 16 \n\t" \
5283 "lw $4, 4(%1) \n\t" /* arg1*/ \
5284 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5285 VALGRIND_CALL_NOREDIR_T9 \
5286 "addu $29, $29, 16 \n\t" \
5287 "lw $28, 0($29) \n\t" \
5288 "lw $31, 4($29) \n\t" \
5289 "addu $29, $29, 8 \n\t" \
5291 : /*out*/ "=r" (_res) \
5292 : /*in*/ "0" (&_argvec[0]) \
5293 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5295 lval = (__typeof__(lval)) _res; \
5298 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5300 volatile OrigFn _orig = (orig); \
5301 volatile unsigned long _argvec[3]; \
5302 volatile unsigned long _res; \
5303 _argvec[0] = (unsigned long)_orig.nraddr; \
5304 _argvec[1] = (unsigned long)(arg1); \
5305 _argvec[2] = (unsigned long)(arg2); \
5307 "subu $29, $29, 8 \n\t" \
5308 "sw $28, 0($29) \n\t" \
5309 "sw $31, 4($29) \n\t" \
5310 "subu $29, $29, 16 \n\t" \
5311 "lw $4, 4(%1) \n\t" \
5312 "lw $5, 8(%1) \n\t" \
5313 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5314 VALGRIND_CALL_NOREDIR_T9 \
5315 "addu $29, $29, 16 \n\t" \
5316 "lw $28, 0($29) \n\t" \
5317 "lw $31, 4($29) \n\t" \
5318 "addu $29, $29, 8 \n\t" \
5320 : /*out*/ "=r" (_res) \
5321 : /*in*/ "0" (&_argvec[0]) \
5322 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5324 lval = (__typeof__(lval)) _res; \
5327 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5329 volatile OrigFn _orig = (orig); \
5330 volatile unsigned long _argvec[4]; \
5331 volatile unsigned long _res; \
5332 _argvec[0] = (unsigned long)_orig.nraddr; \
5333 _argvec[1] = (unsigned long)(arg1); \
5334 _argvec[2] = (unsigned long)(arg2); \
5335 _argvec[3] = (unsigned long)(arg3); \
5337 "subu $29, $29, 8 \n\t" \
5338 "sw $28, 0($29) \n\t" \
5339 "sw $31, 4($29) \n\t" \
5340 "subu $29, $29, 16 \n\t" \
5341 "lw $4, 4(%1) \n\t" \
5342 "lw $5, 8(%1) \n\t" \
5343 "lw $6, 12(%1) \n\t" \
5344 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5345 VALGRIND_CALL_NOREDIR_T9 \
5346 "addu $29, $29, 16 \n\t" \
5347 "lw $28, 0($29) \n\t" \
5348 "lw $31, 4($29) \n\t" \
5349 "addu $29, $29, 8 \n\t" \
5351 : /*out*/ "=r" (_res) \
5352 : /*in*/ "0" (&_argvec[0]) \
5353 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5355 lval = (__typeof__(lval)) _res; \
5358 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5360 volatile OrigFn _orig = (orig); \
5361 volatile unsigned long _argvec[5]; \
5362 volatile unsigned long _res; \
5363 _argvec[0] = (unsigned long)_orig.nraddr; \
5364 _argvec[1] = (unsigned long)(arg1); \
5365 _argvec[2] = (unsigned long)(arg2); \
5366 _argvec[3] = (unsigned long)(arg3); \
5367 _argvec[4] = (unsigned long)(arg4); \
5369 "subu $29, $29, 8 \n\t" \
5370 "sw $28, 0($29) \n\t" \
5371 "sw $31, 4($29) \n\t" \
5372 "subu $29, $29, 16 \n\t" \
5373 "lw $4, 4(%1) \n\t" \
5374 "lw $5, 8(%1) \n\t" \
5375 "lw $6, 12(%1) \n\t" \
5376 "lw $7, 16(%1) \n\t" \
5377 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5378 VALGRIND_CALL_NOREDIR_T9 \
5379 "addu $29, $29, 16 \n\t" \
5380 "lw $28, 0($29) \n\t" \
5381 "lw $31, 4($29) \n\t" \
5382 "addu $29, $29, 8 \n\t" \
5384 : /*out*/ "=r" (_res) \
5385 : /*in*/ "0" (&_argvec[0]) \
5386 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5388 lval = (__typeof__(lval)) _res; \
5391 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5393 volatile OrigFn _orig = (orig); \
5394 volatile unsigned long _argvec[6]; \
5395 volatile unsigned long _res; \
5396 _argvec[0] = (unsigned long)_orig.nraddr; \
5397 _argvec[1] = (unsigned long)(arg1); \
5398 _argvec[2] = (unsigned long)(arg2); \
5399 _argvec[3] = (unsigned long)(arg3); \
5400 _argvec[4] = (unsigned long)(arg4); \
5401 _argvec[5] = (unsigned long)(arg5); \
5403 "subu $29, $29, 8 \n\t" \
5404 "sw $28, 0($29) \n\t" \
5405 "sw $31, 4($29) \n\t" \
5406 "lw $4, 20(%1) \n\t" \
5407 "subu $29, $29, 24\n\t" \
5408 "sw $4, 16($29) \n\t" \
5409 "lw $4, 4(%1) \n\t" \
5410 "lw $5, 8(%1) \n\t" \
5411 "lw $6, 12(%1) \n\t" \
5412 "lw $7, 16(%1) \n\t" \
5413 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5414 VALGRIND_CALL_NOREDIR_T9 \
5415 "addu $29, $29, 24 \n\t" \
5416 "lw $28, 0($29) \n\t" \
5417 "lw $31, 4($29) \n\t" \
5418 "addu $29, $29, 8 \n\t" \
5420 : /*out*/ "=r" (_res) \
5421 : /*in*/ "0" (&_argvec[0]) \
5422 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5424 lval = (__typeof__(lval)) _res; \
5426 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5428 volatile OrigFn _orig = (orig); \
5429 volatile unsigned long _argvec[7]; \
5430 volatile unsigned long _res; \
5431 _argvec[0] = (unsigned long)_orig.nraddr; \
5432 _argvec[1] = (unsigned long)(arg1); \
5433 _argvec[2] = (unsigned long)(arg2); \
5434 _argvec[3] = (unsigned long)(arg3); \
5435 _argvec[4] = (unsigned long)(arg4); \
5436 _argvec[5] = (unsigned long)(arg5); \
5437 _argvec[6] = (unsigned long)(arg6); \
5439 "subu $29, $29, 8 \n\t" \
5440 "sw $28, 0($29) \n\t" \
5441 "sw $31, 4($29) \n\t" \
5442 "lw $4, 20(%1) \n\t" \
5443 "subu $29, $29, 32\n\t" \
5444 "sw $4, 16($29) \n\t" \
5445 "lw $4, 24(%1) \n\t" \
5447 "sw $4, 20($29) \n\t" \
5448 "lw $4, 4(%1) \n\t" \
5449 "lw $5, 8(%1) \n\t" \
5450 "lw $6, 12(%1) \n\t" \
5451 "lw $7, 16(%1) \n\t" \
5452 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5453 VALGRIND_CALL_NOREDIR_T9 \
5454 "addu $29, $29, 32 \n\t" \
5455 "lw $28, 0($29) \n\t" \
5456 "lw $31, 4($29) \n\t" \
5457 "addu $29, $29, 8 \n\t" \
5459 : /*out*/ "=r" (_res) \
5460 : /*in*/ "0" (&_argvec[0]) \
5461 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5463 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
5469 volatile OrigFn _orig = (orig); \
5470 volatile unsigned long _argvec[8]; \
5471 volatile unsigned long _res; \
5472 _argvec[0] = (unsigned long)_orig.nraddr; \
5473 _argvec[1] = (unsigned long)(arg1); \
5474 _argvec[2] = (unsigned long)(arg2); \
5475 _argvec[3] = (unsigned long)(arg3); \
5476 _argvec[4] = (unsigned long)(arg4); \
5477 _argvec[5] = (unsigned long)(arg5); \
5478 _argvec[6] = (unsigned long)(arg6); \
5479 _argvec[7] = (unsigned long)(arg7); \
5481 "subu $29, $29, 8 \n\t" \
5482 "sw $28, 0($29) \n\t" \
5483 "sw $31, 4($29) \n\t" \
5484 "lw $4, 20(%1) \n\t" \
5485 "subu $29, $29, 32\n\t" \
5486 "sw $4, 16($29) \n\t" \
5487 "lw $4, 24(%1) \n\t" \
5488 "sw $4, 20($29) \n\t" \
5489 "lw $4, 28(%1) \n\t" \
5490 "sw $4, 24($29) \n\t" \
5491 "lw $4, 4(%1) \n\t" \
5492 "lw $5, 8(%1) \n\t" \
5493 "lw $6, 12(%1) \n\t" \
5494 "lw $7, 16(%1) \n\t" \
5495 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5496 VALGRIND_CALL_NOREDIR_T9 \
5497 "addu $29, $29, 32 \n\t" \
5498 "lw $28, 0($29) \n\t" \
5499 "lw $31, 4($29) \n\t" \
5500 "addu $29, $29, 8 \n\t" \
5502 : /*out*/ "=r" (_res) \
5503 : /*in*/ "0" (&_argvec[0]) \
5504 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5506 lval = (__typeof__(lval)) _res; \
5509 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5512 volatile OrigFn _orig = (orig); \
5513 volatile unsigned long _argvec[9]; \
5514 volatile unsigned long _res; \
5515 _argvec[0] = (unsigned long)_orig.nraddr; \
5516 _argvec[1] = (unsigned long)(arg1); \
5517 _argvec[2] = (unsigned long)(arg2); \
5518 _argvec[3] = (unsigned long)(arg3); \
5519 _argvec[4] = (unsigned long)(arg4); \
5520 _argvec[5] = (unsigned long)(arg5); \
5521 _argvec[6] = (unsigned long)(arg6); \
5522 _argvec[7] = (unsigned long)(arg7); \
5523 _argvec[8] = (unsigned long)(arg8); \
5525 "subu $29, $29, 8 \n\t" \
5526 "sw $28, 0($29) \n\t" \
5527 "sw $31, 4($29) \n\t" \
5528 "lw $4, 20(%1) \n\t" \
5529 "subu $29, $29, 40\n\t" \
5530 "sw $4, 16($29) \n\t" \
5531 "lw $4, 24(%1) \n\t" \
5532 "sw $4, 20($29) \n\t" \
5533 "lw $4, 28(%1) \n\t" \
5534 "sw $4, 24($29) \n\t" \
5535 "lw $4, 32(%1) \n\t" \
5536 "sw $4, 28($29) \n\t" \
5537 "lw $4, 4(%1) \n\t" \
5538 "lw $5, 8(%1) \n\t" \
5539 "lw $6, 12(%1) \n\t" \
5540 "lw $7, 16(%1) \n\t" \
5541 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5542 VALGRIND_CALL_NOREDIR_T9 \
5543 "addu $29, $29, 40 \n\t" \
5544 "lw $28, 0($29) \n\t" \
5545 "lw $31, 4($29) \n\t" \
5546 "addu $29, $29, 8 \n\t" \
5548 : /*out*/ "=r" (_res) \
5549 : /*in*/ "0" (&_argvec[0]) \
5550 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5552 lval = (__typeof__(lval)) _res; \
5555 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5558 volatile OrigFn _orig = (orig); \
5559 volatile unsigned long _argvec[10]; \
5560 volatile unsigned long _res; \
5561 _argvec[0] = (unsigned long)_orig.nraddr; \
5562 _argvec[1] = (unsigned long)(arg1); \
5563 _argvec[2] = (unsigned long)(arg2); \
5564 _argvec[3] = (unsigned long)(arg3); \
5565 _argvec[4] = (unsigned long)(arg4); \
5566 _argvec[5] = (unsigned long)(arg5); \
5567 _argvec[6] = (unsigned long)(arg6); \
5568 _argvec[7] = (unsigned long)(arg7); \
5569 _argvec[8] = (unsigned long)(arg8); \
5570 _argvec[9] = (unsigned long)(arg9); \
5572 "subu $29, $29, 8 \n\t" \
5573 "sw $28, 0($29) \n\t" \
5574 "sw $31, 4($29) \n\t" \
5575 "lw $4, 20(%1) \n\t" \
5576 "subu $29, $29, 40\n\t" \
5577 "sw $4, 16($29) \n\t" \
5578 "lw $4, 24(%1) \n\t" \
5579 "sw $4, 20($29) \n\t" \
5580 "lw $4, 28(%1) \n\t" \
5581 "sw $4, 24($29) \n\t" \
5582 "lw $4, 32(%1) \n\t" \
5583 "sw $4, 28($29) \n\t" \
5584 "lw $4, 36(%1) \n\t" \
5585 "sw $4, 32($29) \n\t" \
5586 "lw $4, 4(%1) \n\t" \
5587 "lw $5, 8(%1) \n\t" \
5588 "lw $6, 12(%1) \n\t" \
5589 "lw $7, 16(%1) \n\t" \
5590 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5591 VALGRIND_CALL_NOREDIR_T9 \
5592 "addu $29, $29, 40 \n\t" \
5593 "lw $28, 0($29) \n\t" \
5594 "lw $31, 4($29) \n\t" \
5595 "addu $29, $29, 8 \n\t" \
5597 : /*out*/ "=r" (_res) \
5598 : /*in*/ "0" (&_argvec[0]) \
5599 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5601 lval = (__typeof__(lval)) _res; \
5604 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5605 arg7,arg8,arg9,arg10) \
5607 volatile OrigFn _orig = (orig); \
5608 volatile unsigned long _argvec[11]; \
5609 volatile unsigned long _res; \
5610 _argvec[0] = (unsigned long)_orig.nraddr; \
5611 _argvec[1] = (unsigned long)(arg1); \
5612 _argvec[2] = (unsigned long)(arg2); \
5613 _argvec[3] = (unsigned long)(arg3); \
5614 _argvec[4] = (unsigned long)(arg4); \
5615 _argvec[5] = (unsigned long)(arg5); \
5616 _argvec[6] = (unsigned long)(arg6); \
5617 _argvec[7] = (unsigned long)(arg7); \
5618 _argvec[8] = (unsigned long)(arg8); \
5619 _argvec[9] = (unsigned long)(arg9); \
5620 _argvec[10] = (unsigned long)(arg10); \
5622 "subu $29, $29, 8 \n\t" \
5623 "sw $28, 0($29) \n\t" \
5624 "sw $31, 4($29) \n\t" \
5625 "lw $4, 20(%1) \n\t" \
5626 "subu $29, $29, 48\n\t" \
5627 "sw $4, 16($29) \n\t" \
5628 "lw $4, 24(%1) \n\t" \
5629 "sw $4, 20($29) \n\t" \
5630 "lw $4, 28(%1) \n\t" \
5631 "sw $4, 24($29) \n\t" \
5632 "lw $4, 32(%1) \n\t" \
5633 "sw $4, 28($29) \n\t" \
5634 "lw $4, 36(%1) \n\t" \
5635 "sw $4, 32($29) \n\t" \
5636 "lw $4, 40(%1) \n\t" \
5637 "sw $4, 36($29) \n\t" \
5638 "lw $4, 4(%1) \n\t" \
5639 "lw $5, 8(%1) \n\t" \
5640 "lw $6, 12(%1) \n\t" \
5641 "lw $7, 16(%1) \n\t" \
5642 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5643 VALGRIND_CALL_NOREDIR_T9 \
5644 "addu $29, $29, 48 \n\t" \
5645 "lw $28, 0($29) \n\t" \
5646 "lw $31, 4($29) \n\t" \
5647 "addu $29, $29, 8 \n\t" \
5649 : /*out*/ "=r" (_res) \
5650 : /*in*/ "0" (&_argvec[0]) \
5651 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5653 lval = (__typeof__(lval)) _res; \
5656 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5657 arg6,arg7,arg8,arg9,arg10, \
5660 volatile OrigFn _orig = (orig); \
5661 volatile unsigned long _argvec[12]; \
5662 volatile unsigned long _res; \
5663 _argvec[0] = (unsigned long)_orig.nraddr; \
5664 _argvec[1] = (unsigned long)(arg1); \
5665 _argvec[2] = (unsigned long)(arg2); \
5666 _argvec[3] = (unsigned long)(arg3); \
5667 _argvec[4] = (unsigned long)(arg4); \
5668 _argvec[5] = (unsigned long)(arg5); \
5669 _argvec[6] = (unsigned long)(arg6); \
5670 _argvec[7] = (unsigned long)(arg7); \
5671 _argvec[8] = (unsigned long)(arg8); \
5672 _argvec[9] = (unsigned long)(arg9); \
5673 _argvec[10] = (unsigned long)(arg10); \
5674 _argvec[11] = (unsigned long)(arg11); \
5676 "subu $29, $29, 8 \n\t" \
5677 "sw $28, 0($29) \n\t" \
5678 "sw $31, 4($29) \n\t" \
5679 "lw $4, 20(%1) \n\t" \
5680 "subu $29, $29, 48\n\t" \
5681 "sw $4, 16($29) \n\t" \
5682 "lw $4, 24(%1) \n\t" \
5683 "sw $4, 20($29) \n\t" \
5684 "lw $4, 28(%1) \n\t" \
5685 "sw $4, 24($29) \n\t" \
5686 "lw $4, 32(%1) \n\t" \
5687 "sw $4, 28($29) \n\t" \
5688 "lw $4, 36(%1) \n\t" \
5689 "sw $4, 32($29) \n\t" \
5690 "lw $4, 40(%1) \n\t" \
5691 "sw $4, 36($29) \n\t" \
5692 "lw $4, 44(%1) \n\t" \
5693 "sw $4, 40($29) \n\t" \
5694 "lw $4, 4(%1) \n\t" \
5695 "lw $5, 8(%1) \n\t" \
5696 "lw $6, 12(%1) \n\t" \
5697 "lw $7, 16(%1) \n\t" \
5698 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5699 VALGRIND_CALL_NOREDIR_T9 \
5700 "addu $29, $29, 48 \n\t" \
5701 "lw $28, 0($29) \n\t" \
5702 "lw $31, 4($29) \n\t" \
5703 "addu $29, $29, 8 \n\t" \
5705 : /*out*/ "=r" (_res) \
5706 : /*in*/ "0" (&_argvec[0]) \
5707 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5709 lval = (__typeof__(lval)) _res; \
5712 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5713 arg6,arg7,arg8,arg9,arg10, \
5716 volatile OrigFn _orig = (orig); \
5717 volatile unsigned long _argvec[13]; \
5718 volatile unsigned long _res; \
5719 _argvec[0] = (unsigned long)_orig.nraddr; \
5720 _argvec[1] = (unsigned long)(arg1); \
5721 _argvec[2] = (unsigned long)(arg2); \
5722 _argvec[3] = (unsigned long)(arg3); \
5723 _argvec[4] = (unsigned long)(arg4); \
5724 _argvec[5] = (unsigned long)(arg5); \
5725 _argvec[6] = (unsigned long)(arg6); \
5726 _argvec[7] = (unsigned long)(arg7); \
5727 _argvec[8] = (unsigned long)(arg8); \
5728 _argvec[9] = (unsigned long)(arg9); \
5729 _argvec[10] = (unsigned long)(arg10); \
5730 _argvec[11] = (unsigned long)(arg11); \
5731 _argvec[12] = (unsigned long)(arg12); \
5733 "subu $29, $29, 8 \n\t" \
5734 "sw $28, 0($29) \n\t" \
5735 "sw $31, 4($29) \n\t" \
5736 "lw $4, 20(%1) \n\t" \
5737 "subu $29, $29, 56\n\t" \
5738 "sw $4, 16($29) \n\t" \
5739 "lw $4, 24(%1) \n\t" \
5740 "sw $4, 20($29) \n\t" \
5741 "lw $4, 28(%1) \n\t" \
5742 "sw $4, 24($29) \n\t" \
5743 "lw $4, 32(%1) \n\t" \
5744 "sw $4, 28($29) \n\t" \
5745 "lw $4, 36(%1) \n\t" \
5746 "sw $4, 32($29) \n\t" \
5747 "lw $4, 40(%1) \n\t" \
5748 "sw $4, 36($29) \n\t" \
5749 "lw $4, 44(%1) \n\t" \
5750 "sw $4, 40($29) \n\t" \
5751 "lw $4, 48(%1) \n\t" \
5752 "sw $4, 44($29) \n\t" \
5753 "lw $4, 4(%1) \n\t" \
5754 "lw $5, 8(%1) \n\t" \
5755 "lw $6, 12(%1) \n\t" \
5756 "lw $7, 16(%1) \n\t" \
5757 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5758 VALGRIND_CALL_NOREDIR_T9 \
5759 "addu $29, $29, 56 \n\t" \
5760 "lw $28, 0($29) \n\t" \
5761 "lw $31, 4($29) \n\t" \
5762 "addu $29, $29, 8 \n\t" \
5764 : /*out*/ "=r" (_res) \
5765 : /*in*/ "r" (&_argvec[0]) \
5766 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5768 lval = (__typeof__(lval)) _res; \
5771 #endif /* PLAT_mips32_linux */
/* ------------------------- nanomips-linux -------------------- */

#if defined(PLAT_nanomips_linux)

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS "$t4", "$t5", "$a0", "$a1", "$a2",     \
"$a3", "$a4", "$a5", "$a6", "$a7", "$t0", "$t1", "$t2", "$t3",     \
"$t8","$t9", "$at"

/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
   long) == 4. */
5785 #define CALL_FN_W_v(lval, orig) \
5787 volatile OrigFn _orig = (orig); \
5788 volatile unsigned long _argvec[1]; \
5789 volatile unsigned long _res; \
5790 _argvec[0] = (unsigned long)_orig.nraddr; \
5792 "lw $t9, 0(%1)\n\t" \
5793 VALGRIND_CALL_NOREDIR_T9 \
5795 : /*out*/ "=r" (_res) \
5796 : /*in*/ "r" (&_argvec[0]) \
5797 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5799 lval = (__typeof__(lval)) _res; \
5802 #define CALL_FN_W_W(lval, orig, arg1) \
5804 volatile OrigFn _orig = (orig); \
5805 volatile unsigned long _argvec[2]; \
5806 volatile unsigned long _res; \
5807 _argvec[0] = (unsigned long)_orig.nraddr; \
5808 _argvec[1] = (unsigned long)(arg1); \
5810 "lw $t9, 0(%1)\n\t" \
5811 "lw $a0, 4(%1)\n\t" \
5812 VALGRIND_CALL_NOREDIR_T9 \
5814 : /*out*/ "=r" (_res) \
5815 : /*in*/ "r" (&_argvec[0]) \
5816 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5818 lval = (__typeof__(lval)) _res; \
5821 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5823 volatile OrigFn _orig = (orig); \
5824 volatile unsigned long _argvec[3]; \
5825 volatile unsigned long _res; \
5826 _argvec[0] = (unsigned long)_orig.nraddr; \
5827 _argvec[1] = (unsigned long)(arg1); \
5828 _argvec[2] = (unsigned long)(arg2); \
5830 "lw $t9, 0(%1)\n\t" \
5831 "lw $a0, 4(%1)\n\t" \
5832 "lw $a1, 8(%1)\n\t" \
5833 VALGRIND_CALL_NOREDIR_T9 \
5835 : /*out*/ "=r" (_res) \
5836 : /*in*/ "r" (&_argvec[0]) \
5837 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5839 lval = (__typeof__(lval)) _res; \
5842 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5844 volatile OrigFn _orig = (orig); \
5845 volatile unsigned long _argvec[4]; \
5846 volatile unsigned long _res; \
5847 _argvec[0] = (unsigned long)_orig.nraddr; \
5848 _argvec[1] = (unsigned long)(arg1); \
5849 _argvec[2] = (unsigned long)(arg2); \
5850 _argvec[3] = (unsigned long)(arg3); \
5852 "lw $t9, 0(%1)\n\t" \
5853 "lw $a0, 4(%1)\n\t" \
5854 "lw $a1, 8(%1)\n\t" \
5855 "lw $a2,12(%1)\n\t" \
5856 VALGRIND_CALL_NOREDIR_T9 \
5858 : /*out*/ "=r" (_res) \
5859 : /*in*/ "r" (&_argvec[0]) \
5860 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5862 lval = (__typeof__(lval)) _res; \
5865 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5867 volatile OrigFn _orig = (orig); \
5868 volatile unsigned long _argvec[5]; \
5869 volatile unsigned long _res; \
5870 _argvec[0] = (unsigned long)_orig.nraddr; \
5871 _argvec[1] = (unsigned long)(arg1); \
5872 _argvec[2] = (unsigned long)(arg2); \
5873 _argvec[3] = (unsigned long)(arg3); \
5874 _argvec[4] = (unsigned long)(arg4); \
5876 "lw $t9, 0(%1)\n\t" \
5877 "lw $a0, 4(%1)\n\t" \
5878 "lw $a1, 8(%1)\n\t" \
5879 "lw $a2,12(%1)\n\t" \
5880 "lw $a3,16(%1)\n\t" \
5881 VALGRIND_CALL_NOREDIR_T9 \
5883 : /*out*/ "=r" (_res) \
5884 : /*in*/ "r" (&_argvec[0]) \
5885 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5887 lval = (__typeof__(lval)) _res; \
5890 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5892 volatile OrigFn _orig = (orig); \
5893 volatile unsigned long _argvec[6]; \
5894 volatile unsigned long _res; \
5895 _argvec[0] = (unsigned long)_orig.nraddr; \
5896 _argvec[1] = (unsigned long)(arg1); \
5897 _argvec[2] = (unsigned long)(arg2); \
5898 _argvec[3] = (unsigned long)(arg3); \
5899 _argvec[4] = (unsigned long)(arg4); \
5900 _argvec[5] = (unsigned long)(arg5); \
5902 "lw $t9, 0(%1)\n\t" \
5903 "lw $a0, 4(%1)\n\t" \
5904 "lw $a1, 8(%1)\n\t" \
5905 "lw $a2,12(%1)\n\t" \
5906 "lw $a3,16(%1)\n\t" \
5907 "lw $a4,20(%1)\n\t" \
5908 VALGRIND_CALL_NOREDIR_T9 \
5910 : /*out*/ "=r" (_res) \
5911 : /*in*/ "r" (&_argvec[0]) \
5912 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5914 lval = (__typeof__(lval)) _res; \
5916 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5918 volatile OrigFn _orig = (orig); \
5919 volatile unsigned long _argvec[7]; \
5920 volatile unsigned long _res; \
5921 _argvec[0] = (unsigned long)_orig.nraddr; \
5922 _argvec[1] = (unsigned long)(arg1); \
5923 _argvec[2] = (unsigned long)(arg2); \
5924 _argvec[3] = (unsigned long)(arg3); \
5925 _argvec[4] = (unsigned long)(arg4); \
5926 _argvec[5] = (unsigned long)(arg5); \
5927 _argvec[6] = (unsigned long)(arg6); \
5929 "lw $t9, 0(%1)\n\t" \
5930 "lw $a0, 4(%1)\n\t" \
5931 "lw $a1, 8(%1)\n\t" \
5932 "lw $a2,12(%1)\n\t" \
5933 "lw $a3,16(%1)\n\t" \
5934 "lw $a4,20(%1)\n\t" \
5935 "lw $a5,24(%1)\n\t" \
5936 VALGRIND_CALL_NOREDIR_T9 \
5938 : /*out*/ "=r" (_res) \
5939 : /*in*/ "r" (&_argvec[0]) \
5940 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5942 lval = (__typeof__(lval)) _res; \
5945 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5948 volatile OrigFn _orig = (orig); \
5949 volatile unsigned long _argvec[8]; \
5950 volatile unsigned long _res; \
5951 _argvec[0] = (unsigned long)_orig.nraddr; \
5952 _argvec[1] = (unsigned long)(arg1); \
5953 _argvec[2] = (unsigned long)(arg2); \
5954 _argvec[3] = (unsigned long)(arg3); \
5955 _argvec[4] = (unsigned long)(arg4); \
5956 _argvec[5] = (unsigned long)(arg5); \
5957 _argvec[6] = (unsigned long)(arg6); \
5958 _argvec[7] = (unsigned long)(arg7); \
5960 "lw $t9, 0(%1)\n\t" \
5961 "lw $a0, 4(%1)\n\t" \
5962 "lw $a1, 8(%1)\n\t" \
5963 "lw $a2,12(%1)\n\t" \
5964 "lw $a3,16(%1)\n\t" \
5965 "lw $a4,20(%1)\n\t" \
5966 "lw $a5,24(%1)\n\t" \
5967 "lw $a6,28(%1)\n\t" \
5968 VALGRIND_CALL_NOREDIR_T9 \
5970 : /*out*/ "=r" (_res) \
5971 : /*in*/ "r" (&_argvec[0]) \
5972 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5974 lval = (__typeof__(lval)) _res; \
5977 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5980 volatile OrigFn _orig = (orig); \
5981 volatile unsigned long _argvec[9]; \
5982 volatile unsigned long _res; \
5983 _argvec[0] = (unsigned long)_orig.nraddr; \
5984 _argvec[1] = (unsigned long)(arg1); \
5985 _argvec[2] = (unsigned long)(arg2); \
5986 _argvec[3] = (unsigned long)(arg3); \
5987 _argvec[4] = (unsigned long)(arg4); \
5988 _argvec[5] = (unsigned long)(arg5); \
5989 _argvec[6] = (unsigned long)(arg6); \
5990 _argvec[7] = (unsigned long)(arg7); \
5991 _argvec[8] = (unsigned long)(arg8); \
5993 "lw $t9, 0(%1)\n\t" \
5994 "lw $a0, 4(%1)\n\t" \
5995 "lw $a1, 8(%1)\n\t" \
5996 "lw $a2,12(%1)\n\t" \
5997 "lw $a3,16(%1)\n\t" \
5998 "lw $a4,20(%1)\n\t" \
5999 "lw $a5,24(%1)\n\t" \
6000 "lw $a6,28(%1)\n\t" \
6001 "lw $a7,32(%1)\n\t" \
6002 VALGRIND_CALL_NOREDIR_T9 \
6004 : /*out*/ "=r" (_res) \
6005 : /*in*/ "r" (&_argvec[0]) \
6006 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6008 lval = (__typeof__(lval)) _res; \
6011 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6014 volatile OrigFn _orig = (orig); \
6015 volatile unsigned long _argvec[10]; \
6016 volatile unsigned long _res; \
6017 _argvec[0] = (unsigned long)_orig.nraddr; \
6018 _argvec[1] = (unsigned long)(arg1); \
6019 _argvec[2] = (unsigned long)(arg2); \
6020 _argvec[3] = (unsigned long)(arg3); \
6021 _argvec[4] = (unsigned long)(arg4); \
6022 _argvec[5] = (unsigned long)(arg5); \
6023 _argvec[6] = (unsigned long)(arg6); \
6024 _argvec[7] = (unsigned long)(arg7); \
6025 _argvec[8] = (unsigned long)(arg8); \
6026 _argvec[9] = (unsigned long)(arg9); \
6028 "addiu $sp, $sp, -16 \n\t" \
6029 "lw $t9,36(%1) \n\t" \
6030 "sw $t9, 0($sp) \n\t" \
6031 "lw $t9, 0(%1) \n\t" \
6032 "lw $a0, 4(%1) \n\t" \
6033 "lw $a1, 8(%1) \n\t" \
6034 "lw $a2,12(%1) \n\t" \
6035 "lw $a3,16(%1) \n\t" \
6036 "lw $a4,20(%1) \n\t" \
6037 "lw $a5,24(%1) \n\t" \
6038 "lw $a6,28(%1) \n\t" \
6039 "lw $a7,32(%1) \n\t" \
6040 VALGRIND_CALL_NOREDIR_T9 \
6041 "move %0, $a0 \n\t" \
6042 "addiu $sp, $sp, 16 \n\t" \
6043 : /*out*/ "=r" (_res) \
6044 : /*in*/ "r" (&_argvec[0]) \
6045 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6047 lval = (__typeof__(lval)) _res; \
6050 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6051 arg7,arg8,arg9,arg10) \
6053 volatile OrigFn _orig = (orig); \
6054 volatile unsigned long _argvec[11]; \
6055 volatile unsigned long _res; \
6056 _argvec[0] = (unsigned long)_orig.nraddr; \
6057 _argvec[1] = (unsigned long)(arg1); \
6058 _argvec[2] = (unsigned long)(arg2); \
6059 _argvec[3] = (unsigned long)(arg3); \
6060 _argvec[4] = (unsigned long)(arg4); \
6061 _argvec[5] = (unsigned long)(arg5); \
6062 _argvec[6] = (unsigned long)(arg6); \
6063 _argvec[7] = (unsigned long)(arg7); \
6064 _argvec[8] = (unsigned long)(arg8); \
6065 _argvec[9] = (unsigned long)(arg9); \
6066 _argvec[10] = (unsigned long)(arg10); \
6068 "addiu $sp, $sp, -16 \n\t" \
6069 "lw $t9,36(%1) \n\t" \
6070 "sw $t9, 0($sp) \n\t" \
6071 "lw $t9,40(%1) \n\t" \
6072 "sw $t9, 4($sp) \n\t" \
6073 "lw $t9, 0(%1) \n\t" \
6074 "lw $a0, 4(%1) \n\t" \
6075 "lw $a1, 8(%1) \n\t" \
6076 "lw $a2,12(%1) \n\t" \
6077 "lw $a3,16(%1) \n\t" \
6078 "lw $a4,20(%1) \n\t" \
6079 "lw $a5,24(%1) \n\t" \
6080 "lw $a6,28(%1) \n\t" \
6081 "lw $a7,32(%1) \n\t" \
6082 VALGRIND_CALL_NOREDIR_T9 \
6083 "move %0, $a0 \n\t" \
6084 "addiu $sp, $sp, 16 \n\t" \
6085 : /*out*/ "=r" (_res) \
6086 : /*in*/ "r" (&_argvec[0]) \
6087 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6089 lval = (__typeof__(lval)) _res; \
6092 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6093 arg6,arg7,arg8,arg9,arg10, \
6096 volatile OrigFn _orig = (orig); \
6097 volatile unsigned long _argvec[12]; \
6098 volatile unsigned long _res; \
6099 _argvec[0] = (unsigned long)_orig.nraddr; \
6100 _argvec[1] = (unsigned long)(arg1); \
6101 _argvec[2] = (unsigned long)(arg2); \
6102 _argvec[3] = (unsigned long)(arg3); \
6103 _argvec[4] = (unsigned long)(arg4); \
6104 _argvec[5] = (unsigned long)(arg5); \
6105 _argvec[6] = (unsigned long)(arg6); \
6106 _argvec[7] = (unsigned long)(arg7); \
6107 _argvec[8] = (unsigned long)(arg8); \
6108 _argvec[9] = (unsigned long)(arg9); \
6109 _argvec[10] = (unsigned long)(arg10); \
6110 _argvec[11] = (unsigned long)(arg11); \
6112 "addiu $sp, $sp, -16 \n\t" \
6113 "lw $t9,36(%1) \n\t" \
6114 "sw $t9, 0($sp) \n\t" \
6115 "lw $t9,40(%1) \n\t" \
6116 "sw $t9, 4($sp) \n\t" \
6117 "lw $t9,44(%1) \n\t" \
6118 "sw $t9, 8($sp) \n\t" \
6119 "lw $t9, 0(%1) \n\t" \
6120 "lw $a0, 4(%1) \n\t" \
6121 "lw $a1, 8(%1) \n\t" \
6122 "lw $a2,12(%1) \n\t" \
6123 "lw $a3,16(%1) \n\t" \
6124 "lw $a4,20(%1) \n\t" \
6125 "lw $a5,24(%1) \n\t" \
6126 "lw $a6,28(%1) \n\t" \
6127 "lw $a7,32(%1) \n\t" \
6128 VALGRIND_CALL_NOREDIR_T9 \
6129 "move %0, $a0 \n\t" \
6130 "addiu $sp, $sp, 16 \n\t" \
6131 : /*out*/ "=r" (_res) \
6132 : /*in*/ "r" (&_argvec[0]) \
6133 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6135 lval = (__typeof__(lval)) _res; \
6138 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6139 arg6,arg7,arg8,arg9,arg10, \
6142 volatile OrigFn _orig = (orig); \
6143 volatile unsigned long _argvec[13]; \
6144 volatile unsigned long _res; \
6145 _argvec[0] = (unsigned long)_orig.nraddr; \
6146 _argvec[1] = (unsigned long)(arg1); \
6147 _argvec[2] = (unsigned long)(arg2); \
6148 _argvec[3] = (unsigned long)(arg3); \
6149 _argvec[4] = (unsigned long)(arg4); \
6150 _argvec[5] = (unsigned long)(arg5); \
6151 _argvec[6] = (unsigned long)(arg6); \
6152 _argvec[7] = (unsigned long)(arg7); \
6153 _argvec[8] = (unsigned long)(arg8); \
6154 _argvec[9] = (unsigned long)(arg9); \
6155 _argvec[10] = (unsigned long)(arg10); \
6156 _argvec[11] = (unsigned long)(arg11); \
6157 _argvec[12] = (unsigned long)(arg12); \
6159 "addiu $sp, $sp, -16 \n\t" \
6160 "lw $t9,36(%1) \n\t" \
6161 "sw $t9, 0($sp) \n\t" \
6162 "lw $t9,40(%1) \n\t" \
6163 "sw $t9, 4($sp) \n\t" \
6164 "lw $t9,44(%1) \n\t" \
6165 "sw $t9, 8($sp) \n\t" \
6166 "lw $t9,48(%1) \n\t" \
6167 "sw $t9,12($sp) \n\t" \
6168 "lw $t9, 0(%1) \n\t" \
6169 "lw $a0, 4(%1) \n\t" \
6170 "lw $a1, 8(%1) \n\t" \
6171 "lw $a2,12(%1) \n\t" \
6172 "lw $a3,16(%1) \n\t" \
6173 "lw $a4,20(%1) \n\t" \
6174 "lw $a5,24(%1) \n\t" \
6175 "lw $a6,28(%1) \n\t" \
6176 "lw $a7,32(%1) \n\t" \
6177 VALGRIND_CALL_NOREDIR_T9 \
6178 "move %0, $a0 \n\t" \
6179 "addiu $sp, $sp, 16 \n\t" \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#endif /* PLAT_nanomips_linux */
/* ------------------------- mips64-linux ------------------------- */

#if defined(PLAT_mips64_linux)

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",       \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"

/* These CALL_FN_ macros assume that on mips64-linux,
   sizeof(long long) == 8. */

#define MIPS64_LONG2REG_CAST(x) ((long long)(long)x)
6203 #define CALL_FN_W_v(lval, orig) \
6205 volatile OrigFn _orig = (orig); \
6206 volatile unsigned long long _argvec[1]; \
6207 volatile unsigned long long _res; \
6208 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6210 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6211 VALGRIND_CALL_NOREDIR_T9 \
6213 : /*out*/ "=r" (_res) \
6214 : /*in*/ "0" (&_argvec[0]) \
6215 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6217 lval = (__typeof__(lval)) (long)_res; \
6220 #define CALL_FN_W_W(lval, orig, arg1) \
6222 volatile OrigFn _orig = (orig); \
6223 volatile unsigned long long _argvec[2]; \
6224 volatile unsigned long long _res; \
6225 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6226 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6228 "ld $4, 8(%1)\n\t" /* arg1*/ \
6229 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6230 VALGRIND_CALL_NOREDIR_T9 \
6232 : /*out*/ "=r" (_res) \
6233 : /*in*/ "r" (&_argvec[0]) \
6234 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6236 lval = (__typeof__(lval)) (long)_res; \
6239 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
6241 volatile OrigFn _orig = (orig); \
6242 volatile unsigned long long _argvec[3]; \
6243 volatile unsigned long long _res; \
6244 _argvec[0] = _orig.nraddr; \
6245 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6246 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6248 "ld $4, 8(%1)\n\t" \
6249 "ld $5, 16(%1)\n\t" \
6250 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6251 VALGRIND_CALL_NOREDIR_T9 \
6253 : /*out*/ "=r" (_res) \
6254 : /*in*/ "r" (&_argvec[0]) \
6255 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6257 lval = (__typeof__(lval)) (long)_res; \
6261 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
6263 volatile OrigFn _orig = (orig); \
6264 volatile unsigned long long _argvec[4]; \
6265 volatile unsigned long long _res; \
6266 _argvec[0] = _orig.nraddr; \
6267 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6268 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6269 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6271 "ld $4, 8(%1)\n\t" \
6272 "ld $5, 16(%1)\n\t" \
6273 "ld $6, 24(%1)\n\t" \
6274 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6275 VALGRIND_CALL_NOREDIR_T9 \
6277 : /*out*/ "=r" (_res) \
6278 : /*in*/ "r" (&_argvec[0]) \
6279 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6281 lval = (__typeof__(lval)) (long)_res; \
6284 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
6286 volatile OrigFn _orig = (orig); \
6287 volatile unsigned long long _argvec[5]; \
6288 volatile unsigned long long _res; \
6289 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6290 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6291 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6292 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6293 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6295 "ld $4, 8(%1)\n\t" \
6296 "ld $5, 16(%1)\n\t" \
6297 "ld $6, 24(%1)\n\t" \
6298 "ld $7, 32(%1)\n\t" \
6299 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6300 VALGRIND_CALL_NOREDIR_T9 \
6302 : /*out*/ "=r" (_res) \
6303 : /*in*/ "r" (&_argvec[0]) \
6304 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6306 lval = (__typeof__(lval)) (long)_res; \
6309 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
6311 volatile OrigFn _orig = (orig); \
6312 volatile unsigned long long _argvec[6]; \
6313 volatile unsigned long long _res; \
6314 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6315 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6316 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6317 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6318 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6319 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6321 "ld $4, 8(%1)\n\t" \
6322 "ld $5, 16(%1)\n\t" \
6323 "ld $6, 24(%1)\n\t" \
6324 "ld $7, 32(%1)\n\t" \
6325 "ld $8, 40(%1)\n\t" \
6326 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6327 VALGRIND_CALL_NOREDIR_T9 \
6329 : /*out*/ "=r" (_res) \
6330 : /*in*/ "r" (&_argvec[0]) \
6331 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6333 lval = (__typeof__(lval)) (long)_res; \
6336 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
6338 volatile OrigFn _orig = (orig); \
6339 volatile unsigned long long _argvec[7]; \
6340 volatile unsigned long long _res; \
6341 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6342 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6343 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6344 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6345 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6346 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6347 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6349 "ld $4, 8(%1)\n\t" \
6350 "ld $5, 16(%1)\n\t" \
6351 "ld $6, 24(%1)\n\t" \
6352 "ld $7, 32(%1)\n\t" \
6353 "ld $8, 40(%1)\n\t" \
6354 "ld $9, 48(%1)\n\t" \
6355 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6356 VALGRIND_CALL_NOREDIR_T9 \
6358 : /*out*/ "=r" (_res) \
6359 : /*in*/ "r" (&_argvec[0]) \
6360 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6362 lval = (__typeof__(lval)) (long)_res; \
6365 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6368 volatile OrigFn _orig = (orig); \
6369 volatile unsigned long long _argvec[8]; \
6370 volatile unsigned long long _res; \
6371 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6372 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6373 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6374 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6375 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6376 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6377 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6378 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6380 "ld $4, 8(%1)\n\t" \
6381 "ld $5, 16(%1)\n\t" \
6382 "ld $6, 24(%1)\n\t" \
6383 "ld $7, 32(%1)\n\t" \
6384 "ld $8, 40(%1)\n\t" \
6385 "ld $9, 48(%1)\n\t" \
6386 "ld $10, 56(%1)\n\t" \
6387 "ld $25, 0(%1) \n\t" /* target->t9 */ \
6388 VALGRIND_CALL_NOREDIR_T9 \
6390 : /*out*/ "=r" (_res) \
6391 : /*in*/ "r" (&_argvec[0]) \
6392 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6394 lval = (__typeof__(lval)) (long)_res; \
6397 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6400 volatile OrigFn _orig = (orig); \
6401 volatile unsigned long long _argvec[9]; \
6402 volatile unsigned long long _res; \
6403 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6404 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6405 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6406 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6407 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6408 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6409 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6410 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6411 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6413 "ld $4, 8(%1)\n\t" \
6414 "ld $5, 16(%1)\n\t" \
6415 "ld $6, 24(%1)\n\t" \
6416 "ld $7, 32(%1)\n\t" \
6417 "ld $8, 40(%1)\n\t" \
6418 "ld $9, 48(%1)\n\t" \
6419 "ld $10, 56(%1)\n\t" \
6420 "ld $11, 64(%1)\n\t" \
6421 "ld $25, 0(%1) \n\t" /* target->t9 */ \
6422 VALGRIND_CALL_NOREDIR_T9 \
6424 : /*out*/ "=r" (_res) \
6425 : /*in*/ "r" (&_argvec[0]) \
6426 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6428 lval = (__typeof__(lval)) (long)_res; \
6431 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6434 volatile OrigFn _orig = (orig); \
6435 volatile unsigned long long _argvec[10]; \
6436 volatile unsigned long long _res; \
6437 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6438 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6439 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6440 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6441 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6442 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6443 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6444 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6445 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6446 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6448 "dsubu $29, $29, 8\n\t" \
6449 "ld $4, 72(%1)\n\t" \
6450 "sd $4, 0($29)\n\t" \
6451 "ld $4, 8(%1)\n\t" \
6452 "ld $5, 16(%1)\n\t" \
6453 "ld $6, 24(%1)\n\t" \
6454 "ld $7, 32(%1)\n\t" \
6455 "ld $8, 40(%1)\n\t" \
6456 "ld $9, 48(%1)\n\t" \
6457 "ld $10, 56(%1)\n\t" \
6458 "ld $11, 64(%1)\n\t" \
6459 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6460 VALGRIND_CALL_NOREDIR_T9 \
6461 "daddu $29, $29, 8\n\t" \
6463 : /*out*/ "=r" (_res) \
6464 : /*in*/ "r" (&_argvec[0]) \
6465 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6467 lval = (__typeof__(lval)) (long)_res; \
6470 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6471 arg7,arg8,arg9,arg10) \
6473 volatile OrigFn _orig = (orig); \
6474 volatile unsigned long long _argvec[11]; \
6475 volatile unsigned long long _res; \
6476 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6477 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6478 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6479 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6480 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6481 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6482 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6483 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6484 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6485 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6486 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6488 "dsubu $29, $29, 16\n\t" \
6489 "ld $4, 72(%1)\n\t" \
6490 "sd $4, 0($29)\n\t" \
6491 "ld $4, 80(%1)\n\t" \
6492 "sd $4, 8($29)\n\t" \
6493 "ld $4, 8(%1)\n\t" \
6494 "ld $5, 16(%1)\n\t" \
6495 "ld $6, 24(%1)\n\t" \
6496 "ld $7, 32(%1)\n\t" \
6497 "ld $8, 40(%1)\n\t" \
6498 "ld $9, 48(%1)\n\t" \
6499 "ld $10, 56(%1)\n\t" \
6500 "ld $11, 64(%1)\n\t" \
6501 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6502 VALGRIND_CALL_NOREDIR_T9 \
6503 "daddu $29, $29, 16\n\t" \
6505 : /*out*/ "=r" (_res) \
6506 : /*in*/ "r" (&_argvec[0]) \
6507 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6509 lval = (__typeof__(lval)) (long)_res; \
6512 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6513 arg6,arg7,arg8,arg9,arg10, \
6516 volatile OrigFn _orig = (orig); \
6517 volatile unsigned long long _argvec[12]; \
6518 volatile unsigned long long _res; \
6519 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6520 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6521 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6522 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6523 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6524 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6525 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6526 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6527 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6528 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6529 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6530 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
6532 "dsubu $29, $29, 24\n\t" \
6533 "ld $4, 72(%1)\n\t" \
6534 "sd $4, 0($29)\n\t" \
6535 "ld $4, 80(%1)\n\t" \
6536 "sd $4, 8($29)\n\t" \
6537 "ld $4, 88(%1)\n\t" \
6538 "sd $4, 16($29)\n\t" \
6539 "ld $4, 8(%1)\n\t" \
6540 "ld $5, 16(%1)\n\t" \
6541 "ld $6, 24(%1)\n\t" \
6542 "ld $7, 32(%1)\n\t" \
6543 "ld $8, 40(%1)\n\t" \
6544 "ld $9, 48(%1)\n\t" \
6545 "ld $10, 56(%1)\n\t" \
6546 "ld $11, 64(%1)\n\t" \
6547 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6548 VALGRIND_CALL_NOREDIR_T9 \
6549 "daddu $29, $29, 24\n\t" \
6551 : /*out*/ "=r" (_res) \
6552 : /*in*/ "r" (&_argvec[0]) \
6553 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6555 lval = (__typeof__(lval)) (long)_res; \
6558 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6559 arg6,arg7,arg8,arg9,arg10, \
6562 volatile OrigFn _orig = (orig); \
6563 volatile unsigned long long _argvec[13]; \
6564 volatile unsigned long long _res; \
6565 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6566 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6567 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6568 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6569 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6570 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6571 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6572 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6573 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6574 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6575 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6576 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
6577 _argvec[12] = MIPS64_LONG2REG_CAST(arg12); \
6579 "dsubu $29, $29, 32\n\t" \
6580 "ld $4, 72(%1)\n\t" \
6581 "sd $4, 0($29)\n\t" \
6582 "ld $4, 80(%1)\n\t" \
6583 "sd $4, 8($29)\n\t" \
6584 "ld $4, 88(%1)\n\t" \
6585 "sd $4, 16($29)\n\t" \
6586 "ld $4, 96(%1)\n\t" \
6587 "sd $4, 24($29)\n\t" \
6588 "ld $4, 8(%1)\n\t" \
6589 "ld $5, 16(%1)\n\t" \
6590 "ld $6, 24(%1)\n\t" \
6591 "ld $7, 32(%1)\n\t" \
6592 "ld $8, 40(%1)\n\t" \
6593 "ld $9, 48(%1)\n\t" \
6594 "ld $10, 56(%1)\n\t" \
6595 "ld $11, 64(%1)\n\t" \
6596 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6597 VALGRIND_CALL_NOREDIR_T9 \
6598 "daddu $29, $29, 32\n\t" \
6600 : /*out*/ "=r" (_res) \
6601 : /*in*/ "r" (&_argvec[0]) \
6602 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6604 lval = (__typeof__(lval)) (long)_res; \
6607 #endif /* PLAT_mips64_linux */
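
/* Illustration only (not part of this header): a hedged sketch of how the
   CALL_FN_W_* macros above are typically used, together with the
   function-wrapping macros defined earlier in this file.  The wrapped
   function foo() and its soname are hypothetical.

      #include <valgrind/valgrind.h>

      // Wrap  int foo(int x, int y)  found in an object with soname "NONE".
      int I_WRAP_SONAME_FNNAME_ZU(NONE, foo)(int x, int y)
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);          // fetch the original function
         CALL_FN_W_WW(result, fn, x, y);    // call it without redirection
         return result;
      }
*/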

/* ------------------------------------------------------------------ */
/* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS.               */
/*                                                                    */
/* ------------------------------------------------------------------ */
/* Some request codes.  There are many more of these, but most are not
   exposed to end-user view.  These are the public ones, all of the
   form 0x1000 + small_number.

   Core ones are in the range 0x00000000--0x0000ffff.  The non-public
   ones start at 0x2000.
*/
/* These macros are used by tools -- they must be public, but don't
   embed them into other programs. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
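
/* Illustration only: a hypothetical tool "XT" could lay out its own request
   codes like this; Memcheck does the same in memcheck.h, starting from
   VG_USERREQ_TOOL_BASE('M','C').

      enum {
         XT_USERREQ__DO_SOMETHING = VG_USERREQ_TOOL_BASE('X','T'),
         XT_USERREQ__DO_SOMETHING_ELSE
      };
*/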
/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
   This enum comprises an ABI exported by Valgrind to programs
   which use client requests.  DO NOT CHANGE THE NUMERIC VALUES OF THESE
   ENTRIES, NOR DELETE ANY -- add new ones at the end of the most
   relevant group. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* Allows the client program to change a dynamic command line
             option. */
          VG_USERREQ__CLO_CHANGE = 0x1203,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK    = 0x1301,
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK      = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL      = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL     = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC       = 0x1305,
          VG_USERREQ__MEMPOOL_FREE        = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM        = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL        = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE      = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS      = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Some requests used for Valgrind internal, such as
             self-test or self-hosting. */
          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
          /* Used by Inner Valgrind to inform Outer Valgrind where to
             find the list of inner guest threads */
          VG_USERREQ__INNER_THREADS    = 0x1902
   } Vg_ClientRequest;
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif

/* Returns the number of Valgrinds this code is running under.  That
   is, 0 if running natively, 1 if running under Valgrind, 2 if
   running under Valgrind which is running under another Valgrind,
   etc. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)
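
/* Illustration only: a minimal sketch of using RUNNING_ON_VALGRIND to
   shrink a workload when running under Valgrind.  The iteration counts
   are arbitrary.

      #include <stdio.h>
      #include <valgrind/valgrind.h>

      int main(void)
      {
         unsigned depth = RUNNING_ON_VALGRIND;   // 0 when run natively
         unsigned long iters = (depth > 0) ? 1000UL : 1000000UL;
         printf("nesting depth %u -> %lu iterations\n", depth, iters);
         return 0;
      }
*/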
/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
   _qzz_len - 1].  Useful if you are debugging a JITter or some such,
   since it provides a way to make sure valgrind will retranslate the
   invalidated area.  Returns no value. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS,  \
                                    _qzz_addr, _qzz_len, 0, 0, 0)
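
/* Illustration only: a hedged sketch of the JIT use case described above.
   codebuf, patch_insn() and the sizes are hypothetical.

      #include <stddef.h>
      #include <string.h>
      #include <valgrind/valgrind.h>

      static unsigned char codebuf[4096];

      void patch_insn(size_t off, const unsigned char *insn, size_t len)
      {
         memcpy(codebuf + off, insn, len);                   // rewrite code
         VALGRIND_DISCARD_TRANSLATIONS(codebuf + off, len);  // drop stale translations
      }
*/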
#define VALGRIND_INNER_THREADS(_qzz_addr)                               \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS,           \
                                   _qzz_addr, 0, 0, 0, 0)
/* These requests are for getting Valgrind itself to print something.
   Possibly with a backtrace.  This is a really ugly hack.  The return value
   is the number of characters printed, excluding the "**<pid>** " part at the
   start and the backtrace (if present). */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
/* Modern GCC will optimize the static routine out if unused,
   and unused attribute will shut down warnings about it.  */
static int VALGRIND_PRINTF(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF(const char *format, ...)
{
#if defined(NVALGRIND)
   (void)format;
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
{
#if defined(NVALGRIND)
   (void)format;
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
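
/* Illustration only: typical (hypothetical) uses of the two printf
   requests; output goes to the Valgrind log, prefixed with "**<pid>** ".

      VALGRIND_PRINTF("%d frobnications so far\n", count);
      VALGRIND_PRINTF_BACKTRACE("who allocated %p?\n", ptr);
*/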
/* These requests allow control to move from the simulated CPU to the
   real CPU, calling an arbitrary function.

   Note that the current ThreadId is inserted as the first argument.
   So this call:

     VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)

   requires f to have this signature:

     Word f(Word tid, Word arg1, Word arg2)

   where "Word" is a word-sized type.

   Note that these client requests are not entirely reliable.  For example,
   if you call a function with them that subsequently calls printf(),
   there's a high chance Valgrind will crash.  Generally, your prospects of
   these working are made higher if the called function does not refer to
   any global variables, and does not refer to any libc or other functions
   (printf et al).  Any kind of entanglement with libc or dynamic linking is
   likely to have a bad outcome, for tricky reasons which we've grappled
   with a lot in the past.
*/
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)                    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,            \
                                    VG_USERREQ__CLIENT_CALL1,          \
                                    _qyy_fn,                           \
                                    _qyy_arg1, 0, 0, 0)

#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)         \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,            \
                                    VG_USERREQ__CLIENT_CALL2,          \
                                    _qyy_fn,                           \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,             \
                                    VG_USERREQ__CLIENT_CALL3,           \
                                    _qyy_fn,                            \
                                    _qyy_arg1, _qyy_arg2,               \
                                    _qyy_arg3, 0)
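
/* Illustration only: a hedged sketch of running a function on the real
   CPU.  add_on_real_cpu() is hypothetical; note the extra leading tid
   argument and the word-sized types, as described above.

      static long add_on_real_cpu(long tid, long a, long b)
      {
         (void)tid;          // ThreadId inserted by Valgrind
         return a + b;
      }

      long sum = VALGRIND_NON_SIMD_CALL2(add_on_real_cpu, 40, 2);
*/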
/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted. */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
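
/* Illustration only: a regression-test style check (hypothetical),
   e.g. after exercising code that is expected to be error-free.

      unsigned errs = VALGRIND_COUNT_ERRORS;
      if (errs != 0)
         VALGRIND_PRINTF("unexpected: %u errors recorded\n", errs);
*/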
/* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
   when heap blocks are allocated in order to give accurate results.  This
   happens automatically for the standard allocator functions such as
   malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
   delete[], etc.

   But if your program uses a custom allocator, this doesn't automatically
   happen, and Valgrind will not do as well.  For example, if you allocate
   superblocks with mmap() and then allocate chunks of the superblocks, all
   Valgrind's observations will be at the mmap() level and it won't know that
   the chunks should be considered separate entities.  In Memcheck's case,
   that means you probably won't get heap block overrun detection (because
   there won't be redzones marked as unaddressable) and you definitely won't
   get any leak detection.

   The following client requests allow a custom allocator to be annotated so
   that it can be handled accurately by Valgrind.

   VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
   by a malloc()-like function.  For Memcheck (an illustrative case), this
   does two things:

   - It records that the block has been allocated.  This means any addresses
     within the block mentioned in error messages will be
     identified as belonging to the block.  It also means that if the block
     isn't freed it will be detected by the leak checker.

   - It marks the block as being addressable and undefined (if 'is_zeroed' is
     not set), or addressable and defined (if 'is_zeroed' is set).  This
     controls how accesses to the block by the program are handled.

   'addr' is the start of the usable block (ie. after any
   redzone), 'sizeB' is its size.  'rzB' is the redzone size if the allocator
   can apply redzones -- these are blocks of padding at the start and end of
   each block.  Adding redzones is recommended as it makes it much more likely
   Valgrind will spot block overruns.  `is_zeroed' indicates if the memory is
   zeroed (or filled with another predictable value), as is the case for
   calloc().

   VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
   heap block -- that will be used by the client program -- is allocated.
   It's best to put it at the outermost level of the allocator if possible;
   for example, if you have a function my_alloc() which calls
   internal_alloc(), and the client request is put inside internal_alloc(),
   stack traces relating to the heap block will contain entries for both
   my_alloc() and internal_alloc(), which is probably not what you want.

   For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
   custom blocks from within a heap block, B, that has been allocated with
   malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
   -- the custom blocks will take precedence.

   VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK.  For
   Memcheck, it does two things:

   - It records that the block has been deallocated.  This assumes that the
     block was annotated as having been allocated via
     VALGRIND_MALLOCLIKE_BLOCK.  Otherwise, an error will be issued.

   - It marks the block as being unaddressable.

   VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
   heap block is deallocated.

   VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation.  For
   Memcheck, it does four things:

   - It records that the size of a block has been changed.  This assumes that
     the block was annotated as having been allocated via
     VALGRIND_MALLOCLIKE_BLOCK.  Otherwise, an error will be issued.

   - If the block shrunk, it marks the freed memory as being unaddressable.

   - If the block grew, it marks the new area as undefined and defines a red
     zone past the end of the new block.

   - The V-bits of the overlap between the old and the new block are preserved.

   VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
   and before deallocation of the old block.

   In many cases, these three client requests will not be enough to get your
   allocator working well with Memcheck.  More specifically, if your allocator
   writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
   will be necessary to mark the memory as addressable just before the zeroing
   occurs, otherwise you'll get a lot of invalid write errors.  For example,
   you'll need to do this if your allocator recycles freed blocks, but it
   zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
   Alternatively, if your allocator reuses freed blocks for allocator-internal
   data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.

   Really, what's happening is a blurring of the lines between the client
   program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
   memory should be considered unaddressable to the client program, but the
   allocator knows more than the rest of the client program and so may be able
   to safely access it.  Extra client requests are necessary for Valgrind to
   understand the distinction between the allocator and the rest of the
   client program.

   Ignored if addr == 0.
*/
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)          \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK,       \
                                    addr, sizeB, rzB, is_zeroed, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,    \
                                    addr, oldSizeB, newSizeB, rzB, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,         \
                                    addr, rzB, 0, 0, 0)
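
/* Illustration only: a hedged sketch of annotating a toy bump allocator
   that carves client blocks out of one mmap'd superblock.  All names
   (pool_init, pool_alloc, pool_free) and the redzone size are
   hypothetical; error handling is omitted.

      #include <stddef.h>
      #include <sys/mman.h>
      #include <valgrind/valgrind.h>

      #define POOL_RZ 16                  // redzone left around each block

      static char  *pool_base;
      static size_t pool_used;

      void pool_init(size_t size)
      {
         pool_base = mmap(NULL, size, PROT_READ|PROT_WRITE,
                          MAP_PRIVATE|MAP_ANONYMOUS, -1, 0);
         pool_used = 0;
      }

      void *pool_alloc(size_t n)
      {
         char *p = pool_base + pool_used + POOL_RZ;   // skip leading redzone
         pool_used += POOL_RZ + n + POOL_RZ;
         VALGRIND_MALLOCLIKE_BLOCK(p, n, POOL_RZ, 0); // 0: not zeroed
         return p;
      }

      void pool_free(void *p)
      {
         VALGRIND_FREELIKE_BLOCK(p, POOL_RZ);
         // a real allocator would also recycle the space
      }
*/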
/* Create a memory pool. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)             \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,    \
                                   pool, rzB, is_zeroed, 0, 0)
/* Create a memory pool with some flags specifying extended behaviour.
   When flags is zero, the behaviour is identical to VALGRIND_CREATE_MEMPOOL.

   The flag VALGRIND_MEMPOOL_METAPOOL specifies that the pieces of memory
   associated with the pool using VALGRIND_MEMPOOL_ALLOC will be used
   by the application as superblocks to dole out MALLOC_LIKE blocks using
   VALGRIND_MALLOCLIKE_BLOCK.  In other words, a meta pool is a two-level
   pool: the first level is the blocks described by VALGRIND_MEMPOOL_ALLOC;
   the second level blocks are described using VALGRIND_MALLOCLIKE_BLOCK.
   Note that the association between the pool and the second level blocks
   is implicit: second level blocks will be located inside first level
   blocks.  It is necessary to use the VALGRIND_MEMPOOL_METAPOOL flag
   for such two-level pools, as otherwise valgrind will detect overlapping
   memory blocks, and will abort execution (e.g. during leak search).

   Such a meta pool can also be marked as an 'auto free' pool using the flag
   VALGRIND_MEMPOOL_AUTO_FREE, which must be OR-ed together with the
   VALGRIND_MEMPOOL_METAPOOL.  For an 'auto free' pool, VALGRIND_MEMPOOL_FREE
   will automatically free the second level blocks that are contained
   inside the first level block freed with VALGRIND_MEMPOOL_FREE.
   In other words, calling VALGRIND_MEMPOOL_FREE will cause implicit calls
   to VALGRIND_FREELIKE_BLOCK for all the second level blocks included
   in the first level block.
   Note: it is an error to use the VALGRIND_MEMPOOL_AUTO_FREE flag
   without the VALGRIND_MEMPOOL_METAPOOL flag.
*/
#define VALGRIND_MEMPOOL_AUTO_FREE  1
#define VALGRIND_MEMPOOL_METAPOOL   2
#define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags)        \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,          \
                                   pool, rzB, is_zeroed, flags, 0)
/* Destroy a memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,   \
                                   pool, 0, 0, 0, 0)

/* Associate a piece of memory with a memory pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,     \
                                   pool, addr, size, 0, 0)

/* Disassociate a piece of memory from a memory pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,      \
                                   pool, addr, 0, 0, 0)

/* Disassociate any pieces outside a particular range. */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,      \
                                   pool, addr, size, 0, 0)

/* Tell Valgrind that the pool previously anchored at address poolA
   has moved to anchor address poolB. */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,      \
                                   poolA, poolB, 0, 0, 0)

/* Resize and/or move a piece associated with a memory pool. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,    \
                                   pool, addrA, addrB, size, 0)

/* Return 1 if a mempool exists, else 0. */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MEMPOOL_EXISTS,        \
                               pool, 0, 0, 0, 0)
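
/* Illustration only: a hedged sketch of describing a simple pool
   allocator with the mempool requests above.  pool_carve() and the
   anchor object are hypothetical.

      static char anchor;                          // any unique address
      VALGRIND_CREATE_MEMPOOL(&anchor, 0, 0);      // no redzones, not zeroed

      void *obj = pool_carve(&anchor, 64);         // allocator hands out a piece
      VALGRIND_MEMPOOL_ALLOC(&anchor, obj, 64);

      VALGRIND_MEMPOOL_FREE(&anchor, obj);         // piece given back
      VALGRIND_DESTROY_MEMPOOL(&anchor);           // whole pool torn down
*/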
/* Mark a piece of memory as being a stack.  Returns a stack id.
   start is the lowest addressable stack byte, end is the highest
   addressable stack byte. */
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__STACK_REGISTER,        \
                               start, end, 0, 0, 0)

/* Unmark the piece of memory associated with a stack id as being a
   stack. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
                                    id, 0, 0, 0, 0)

/* Change the start and end address of the stack id.
   start is the new lowest addressable stack byte, end is the new highest
   addressable stack byte. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,     \
                                    id, start, end, 0, 0)
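
/* Illustration only: a hedged sketch of registering a manually allocated
   stack, e.g. one handed to makecontext() or a coroutine library.  Sizes
   and names are hypothetical.

      #include <stdlib.h>
      #include <valgrind/valgrind.h>

      size_t   sz    = 64 * 1024;
      char    *stack = malloc(sz);
      unsigned sid   = VALGRIND_STACK_REGISTER(stack, stack + sz - 1);
                       // lowest and highest addressable bytes, as above

      // ... run on that stack ...

      VALGRIND_STACK_DEREGISTER(sid);
      free(stack);
*/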
/* Load PDB debug info for Wine PE image_map. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                    fd, ptr, total_size, delta, 0)
/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,      \
                               addr, buf64, 0, 0, 0)
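
/* Illustration only: a hedged sketch of mapping the caller's return
   address (GCC builtin, hypothetical usage) to a source location string
   for ad-hoc logging.

      char where[64];                                  // 64 bytes required
      VALGRIND_MAP_IP_TO_SRCLOC(__builtin_return_address(0), where);
      VALGRIND_PRINTF("called from %s\n", where[0] ? where : "<unknown>");
*/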
/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled. */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)
/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING. */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)
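
/* Illustration only: a hedged sketch of silencing reports around a
   region of code that is known to make deliberately dodgy accesses.
   probe_uninitialised() is hypothetical.

      VALGRIND_DISABLE_ERROR_REPORTING;
      probe_uninitialised(buf, len);      // intentional wild reads
      VALGRIND_ENABLE_ERROR_REPORTING;    // must balance the disable
*/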
/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise. */
#define VALGRIND_MONITOR_COMMAND(command)                               \
   VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND,  \
                                   command, 0, 0, 0, 0)
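
/* Illustration only: a hedged sketch of asking the tool for a leak
   summary at an interesting point.  "leak_check summary" is a Memcheck
   monitor command; per the comment above, the request returns 1 if the
   tool in use does not recognise it.

      if (RUNNING_ON_VALGRIND)
         (void) VALGRIND_MONITOR_COMMAND("leak_check summary");
*/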
/* Change the value of a dynamic command line option.
   Note that unknown or not dynamically changeable options
   will cause a warning message to be output. */
#define VALGRIND_CLO_CHANGE(option)                                     \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CLO_CHANGE,              \
                                   option, 0, 0, 0, 0)
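
/* Illustration only: a hedged sketch of flipping an option at run time.
   Whether a given option is dynamically changeable depends on the
   Valgrind version and tool in use; unknown or non-changeable options
   only produce a warning, as noted above.

      VALGRIND_CLO_CHANGE("--error-markers=begin,end");
*/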
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_nanomips_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris

#endif   /* __VALGRIND_H */