2 ----------------------------------------------------------------
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
10 ----------------------------------------------------------------
12 This file is part of Valgrind, a dynamic binary instrumentation
15 Copyright (C) 2000-2017 Julian Seward. All rights reserved.
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
48 ----------------------------------------------------------------
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
55 ----------------------------------------------------------------
59 /* This file is for inclusion into client (your!) code.
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on valgrind, each client request
67 consumes very few (eg. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
77 /* ------------------------------------------------------------------ */
78 /* VERSION NUMBER OF VALGRIND */
79 /* ------------------------------------------------------------------ */
81 /* Specify Valgrind's version number, so that user code can
82 conditionally compile based on our version number. Note that these
83 were introduced at version 3.6 and so do not exist in version 3.5
84 or earlier. The recommended way to use them to check for "version
87 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88 && (__VALGRIND_MAJOR__ > 3 \
89 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
/* Version of the Valgrind distribution this header was shipped with.
   These were introduced at 3.6 and so are absent in 3.5 and earlier
   (see the comment above). */
#define __VALGRIND_MAJOR__ 3
#define __VALGRIND_MINOR__ 16
97 /* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead
101 /* Derive some tags indicating what the target platform is. Note
102 that in this file we're using the compiler's CPP symbols for
103 identifying architectures, which are different to the ones we use
104 within the rest of Valgrind. Note, __powerpc__ is active for both
105 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106 latter (on Linux, that is).
108 Misc note: how to find out what's predefined in gcc by default:
109 gcc -Wp,-dM somefile.c
111 #undef PLAT_x86_darwin
112 #undef PLAT_amd64_darwin
113 #undef PLAT_x86_win32
114 #undef PLAT_amd64_win64
115 #undef PLAT_x86_linux
116 #undef PLAT_amd64_linux
117 #undef PLAT_ppc32_linux
118 #undef PLAT_ppc64be_linux
119 #undef PLAT_ppc64le_linux
120 #undef PLAT_arm_linux
121 #undef PLAT_arm64_linux
122 #undef PLAT_s390x_linux
123 #undef PLAT_mips32_linux
124 #undef PLAT_mips64_linux
125 #undef PLAT_x86_solaris
126 #undef PLAT_amd64_solaris
129 #if defined(__APPLE__) && defined(__i386__)
130 # define PLAT_x86_darwin 1
131 #elif defined(__APPLE__) && defined(__x86_64__)
132 # define PLAT_amd64_darwin 1
133 #elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
134 || defined(__CYGWIN32__) \
135 || (defined(_WIN32) && defined(_M_IX86))
136 # define PLAT_x86_win32 1
137 #elif defined(__MINGW64__) \
138 || (defined(_WIN64) && defined(_M_X64))
139 # define PLAT_amd64_win64 1
140 #elif defined(__linux__) && defined(__i386__)
141 # define PLAT_x86_linux 1
142 #elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
143 # define PLAT_amd64_linux 1
144 #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
145 # define PLAT_ppc32_linux 1
146 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
147 /* Big Endian uses ELF version 1 */
148 # define PLAT_ppc64be_linux 1
149 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
150 /* Little Endian uses ELF version 2 */
151 # define PLAT_ppc64le_linux 1
152 #elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
153 # define PLAT_arm_linux 1
154 #elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
155 # define PLAT_arm64_linux 1
156 #elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
157 # define PLAT_s390x_linux 1
158 #elif defined(__linux__) && defined(__mips__) && (__mips==64)
159 # define PLAT_mips64_linux 1
160 #elif defined(__linux__) && defined(__mips__) && (__mips!=64)
161 # define PLAT_mips32_linux 1
162 #elif defined(__sun) && defined(__i386__)
163 # define PLAT_x86_solaris 1
164 #elif defined(__sun) && defined(__x86_64__)
165 # define PLAT_amd64_solaris 1
167 /* If we're not compiling for our target platform, don't generate
169 # if !defined(NVALGRIND)
175 /* ------------------------------------------------------------------ */
176 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
177 /* in here of use to end-users -- skip to the next section. */
178 /* ------------------------------------------------------------------ */
181 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
182 * request. Accepts both pointers and integers as arguments.
184 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
185 * client request that does not return a value.
187 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
188 * client request and whose value equals the client request result. Accepts
189 * both pointers and integers as arguments. Note that such calls are not
190 * necessarily pure functions -- they may have side effects.
/* Invoke a Valgrind client request and store its result in the lvalue
   _zzq_rlval.  Statement form of VALGRIND_DO_CLIENT_REQUEST_EXPR;
   _zzq_default is what the request yields when not running under
   Valgrind. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,           \
                                   _zzq_request, _zzq_arg1, _zzq_arg2, \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
   do {                                                                \
      (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR(                  \
                        (_zzq_default), (_zzq_request),                \
                        (_zzq_arg1), (_zzq_arg2), (_zzq_arg3),         \
                        (_zzq_arg4), (_zzq_arg5));                     \
   } while (0)
/* Invoke a Valgrind client request whose result is deliberately
   discarded (cast to void); the default value is fixed at 0. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,       \
                           _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5)\
   do {                                                                \
      (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
                0, (_zzq_request), (_zzq_arg1), (_zzq_arg2),           \
                (_zzq_arg3), (_zzq_arg4), (_zzq_arg5));                \
   } while (0)
206 #if defined(NVALGRIND)
208 /* Define NVALGRIND to completely remove the Valgrind magic sequence
209 from the compiled code (analogous to NDEBUG's effects on
211 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
212 _zzq_default, _zzq_request, \
213 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
216 #else /* ! NVALGRIND */
218 /* The following defines the magic code sequences which the JITter
219 spots and handles magically. Don't look too closely at them as
220 they will rot your brain.
222 The assembly code sequences for all architectures is in this one
223 file. This is because this file must be stand-alone, and we don't
224 want to have multiple files.
226 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
227 value gets put in the return slot, so that everything works when
228 this is executed not under Valgrind. Args are passed in a memory
229 block, and so there's no intrinsic limit to the number that could
230 be passed, but it's currently five.
233 _zzq_rlval result lvalue
234 _zzq_default default value (result returned when running on real CPU)
235 _zzq_request request code
236 _zzq_arg1..5 request params
238 The other two macros are used to support function wrapping, and are
239 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
240 guest's NRADDR pseudo-register and whatever other information is
241 needed to safely run the call original from the wrapper: on
242 ppc64-linux, the R2 value at the divert point is also needed. This
243 information is abstracted into a user-visible type, OrigFn.
245 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
246 guest, but guarantees that the branch instruction will not be
247 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
248 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
249 complete inline asm, since it needs to be combined with more magic
250 inline asm stuff to be useful.
253 /* ----------------- x86-{linux,darwin,solaris} ---------------- */
255 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
256 || (defined(PLAT_x86_win32) && defined(__GNUC__)) \
257 || defined(PLAT_x86_solaris)
261 unsigned int nraddr
; /* where's the code? */
/* x86 magic preamble: four left-rotates of %edi totalling
   3+13+29+19 = 64 bits, a net no-op on the 32-bit register -- so it
   is harmless on a real CPU but recognizable by Valgrind's JIT. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3, %%edi ; roll $13, %%edi\n\t"       \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"
269 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
270 _zzq_default, _zzq_request, \
271 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
273 ({volatile unsigned int _zzq_args[6]; \
274 volatile unsigned int _zzq_result; \
275 _zzq_args[0] = (unsigned int)(_zzq_request); \
276 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
277 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
278 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
279 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
280 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
281 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
282 /* %EDX = client_request ( %EAX ) */ \
283 "xchgl %%ebx,%%ebx" \
284 : "=d" (_zzq_result) \
285 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
291 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
292 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
293 volatile unsigned int __addr; \
294 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
295 /* %EAX = guest_NRADDR */ \
296 "xchgl %%ecx,%%ecx" \
301 _zzq_orig->nraddr = __addr; \
/* Asm TEXT fragment (not a complete inline-asm statement -- see the
   note on VALGRIND_CALL_NOREDIR_* above): preamble plus the marker
   the JIT interprets as a non-redirected "call *%eax".  The
   xchgl of %edx with itself is a no-op on a real CPU. */
#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"
309 #define VALGRIND_VEX_INJECT_IR() \
311 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
312 "xchgl %%edi,%%edi\n\t" \
313 : : : "cc", "memory" \
317 #endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
318 || PLAT_x86_solaris */
320 /* ------------------------- x86-Win32 ------------------------- */
322 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
326 unsigned int nraddr
; /* where's the code? */
330 #if defined(_MSC_VER)
332 #define __SPECIAL_INSTRUCTION_PREAMBLE \
333 __asm rol edi, 3 __asm rol edi, 13 \
334 __asm rol edi, 29 __asm rol edi, 19
336 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
337 _zzq_default, _zzq_request, \
338 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
339 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
340 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
341 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
342 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
344 static __inline
uintptr_t
345 valgrind_do_client_request_expr(uintptr_t _zzq_default
, uintptr_t _zzq_request
,
346 uintptr_t _zzq_arg1
, uintptr_t _zzq_arg2
,
347 uintptr_t _zzq_arg3
, uintptr_t _zzq_arg4
,
350 volatile uintptr_t _zzq_args
[6];
351 volatile unsigned int _zzq_result
;
352 _zzq_args
[0] = (uintptr_t)(_zzq_request
);
353 _zzq_args
[1] = (uintptr_t)(_zzq_arg1
);
354 _zzq_args
[2] = (uintptr_t)(_zzq_arg2
);
355 _zzq_args
[3] = (uintptr_t)(_zzq_arg3
);
356 _zzq_args
[4] = (uintptr_t)(_zzq_arg4
);
357 _zzq_args
[5] = (uintptr_t)(_zzq_arg5
);
358 __asm
{ __asm lea eax
, _zzq_args __asm mov edx
, _zzq_default
359 __SPECIAL_INSTRUCTION_PREAMBLE
360 /* %EDX = client_request ( %EAX ) */
362 __asm mov _zzq_result
, edx
367 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
368 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
369 volatile unsigned int __addr; \
370 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
371 /* %EAX = guest_NRADDR */ \
373 __asm mov __addr, eax \
375 _zzq_orig->nraddr = __addr; \
378 #define VALGRIND_CALL_NOREDIR_EAX ERROR
380 #define VALGRIND_VEX_INJECT_IR() \
382 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
388 #error Unsupported compiler.
391 #endif /* PLAT_x86_win32 */
393 /* ----------------- amd64-{linux,darwin,solaris} --------------- */
395 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
396 || defined(PLAT_amd64_solaris) \
397 || (defined(PLAT_amd64_win64) && defined(__GNUC__))
401 unsigned long int nraddr
; /* where's the code? */
/* amd64 magic preamble: four left-rotates of %rdi totalling
   3+13+61+51 = 128 bits, a net no-op on the 64-bit register, hence
   harmless on a real CPU but spotted by Valgrind's JIT. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3, %%rdi ; rolq $13, %%rdi\n\t"       \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
409 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
410 _zzq_default, _zzq_request, \
411 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
413 ({ volatile unsigned long int _zzq_args[6]; \
414 volatile unsigned long int _zzq_result; \
415 _zzq_args[0] = (unsigned long int)(_zzq_request); \
416 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
417 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
418 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
419 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
420 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
421 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
422 /* %RDX = client_request ( %RAX ) */ \
423 "xchgq %%rbx,%%rbx" \
424 : "=d" (_zzq_result) \
425 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
431 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
432 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
433 volatile unsigned long int __addr; \
434 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
435 /* %RAX = guest_NRADDR */ \
436 "xchgq %%rcx,%%rcx" \
441 _zzq_orig->nraddr = __addr; \
/* Asm TEXT fragment (must be combined with more inline asm -- see the
   note above): preamble plus the marker for a non-redirected
   "call *%rax".  xchgq of %rdx with itself is a real-CPU no-op. */
#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"
449 #define VALGRIND_VEX_INJECT_IR() \
451 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
452 "xchgq %%rdi,%%rdi\n\t" \
453 : : : "cc", "memory" \
457 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
459 /* ------------------------- amd64-Win64 ------------------------- */
461 #if defined(PLAT_amd64_win64) && !defined(__GNUC__)
463 #error Unsupported compiler.
465 #endif /* PLAT_amd64_win64 */
467 /* ------------------------ ppc32-linux ------------------------ */
469 #if defined(PLAT_ppc32_linux)
473 unsigned int nraddr
; /* where's the code? */
/* ppc32 magic preamble: four rotate-left-word ops on r0 totalling
   3+13+29+19 = 64 bits (== 0 mod 32), a net no-op on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
                     "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"
481 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
482 _zzq_default, _zzq_request, \
483 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
486 ({ unsigned int _zzq_args[6]; \
487 unsigned int _zzq_result; \
488 unsigned int* _zzq_ptr; \
489 _zzq_args[0] = (unsigned int)(_zzq_request); \
490 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
491 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
492 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
493 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
494 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
495 _zzq_ptr = _zzq_args; \
496 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
497 "mr 4,%2\n\t" /*ptr*/ \
498 __SPECIAL_INSTRUCTION_PREAMBLE \
499 /* %R3 = client_request ( %R4 ) */ \
501 "mr %0,3" /*result*/ \
502 : "=b" (_zzq_result) \
503 : "b" (_zzq_default), "b" (_zzq_ptr) \
504 : "cc", "memory", "r3", "r4"); \
508 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
509 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
510 unsigned int __addr; \
511 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
512 /* %R3 = guest_NRADDR */ \
517 : "cc", "memory", "r3" \
519 _zzq_orig->nraddr = __addr; \
522 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
523 __SPECIAL_INSTRUCTION_PREAMBLE \
524 /* branch-and-link-to-noredir *%R11 */ \
527 #define VALGRIND_VEX_INJECT_IR() \
529 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
534 #endif /* PLAT_ppc32_linux */
536 /* ------------------------ ppc64-linux ------------------------ */
538 #if defined(PLAT_ppc64be_linux)
542 unsigned long int nraddr
; /* where's the code? */
543 unsigned long int r2
; /* what tocptr do we need? */
/* ppc64 (BE) magic preamble: four 64-bit rotates of r0 totalling
   3+13+61+51 = 128 bits (== 0 mod 64), a net no-op on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
551 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
552 _zzq_default, _zzq_request, \
553 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
556 ({ unsigned long int _zzq_args[6]; \
557 unsigned long int _zzq_result; \
558 unsigned long int* _zzq_ptr; \
559 _zzq_args[0] = (unsigned long int)(_zzq_request); \
560 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
561 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
562 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
563 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
564 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
565 _zzq_ptr = _zzq_args; \
566 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
567 "mr 4,%2\n\t" /*ptr*/ \
568 __SPECIAL_INSTRUCTION_PREAMBLE \
569 /* %R3 = client_request ( %R4 ) */ \
571 "mr %0,3" /*result*/ \
572 : "=b" (_zzq_result) \
573 : "b" (_zzq_default), "b" (_zzq_ptr) \
574 : "cc", "memory", "r3", "r4"); \
578 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
579 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
580 unsigned long int __addr; \
581 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
582 /* %R3 = guest_NRADDR */ \
587 : "cc", "memory", "r3" \
589 _zzq_orig->nraddr = __addr; \
590 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
591 /* %R3 = guest_NRADDR_GPR2 */ \
596 : "cc", "memory", "r3" \
598 _zzq_orig->r2 = __addr; \
601 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
602 __SPECIAL_INSTRUCTION_PREAMBLE \
603 /* branch-and-link-to-noredir *%R11 */ \
606 #define VALGRIND_VEX_INJECT_IR() \
608 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
613 #endif /* PLAT_ppc64be_linux */
615 #if defined(PLAT_ppc64le_linux)
619 unsigned long int nraddr
; /* where's the code? */
620 unsigned long int r2
; /* what tocptr do we need? */
/* ppc64 (LE) magic preamble: identical to the BE variant -- four
   64-bit rotates of r0 totalling 128 bits, a net no-op. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
628 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
629 _zzq_default, _zzq_request, \
630 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
633 ({ unsigned long int _zzq_args[6]; \
634 unsigned long int _zzq_result; \
635 unsigned long int* _zzq_ptr; \
636 _zzq_args[0] = (unsigned long int)(_zzq_request); \
637 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
638 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
639 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
640 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
641 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
642 _zzq_ptr = _zzq_args; \
643 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
644 "mr 4,%2\n\t" /*ptr*/ \
645 __SPECIAL_INSTRUCTION_PREAMBLE \
646 /* %R3 = client_request ( %R4 ) */ \
648 "mr %0,3" /*result*/ \
649 : "=b" (_zzq_result) \
650 : "b" (_zzq_default), "b" (_zzq_ptr) \
651 : "cc", "memory", "r3", "r4"); \
655 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
656 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
657 unsigned long int __addr; \
658 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
659 /* %R3 = guest_NRADDR */ \
664 : "cc", "memory", "r3" \
666 _zzq_orig->nraddr = __addr; \
667 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
668 /* %R3 = guest_NRADDR_GPR2 */ \
673 : "cc", "memory", "r3" \
675 _zzq_orig->r2 = __addr; \
678 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
679 __SPECIAL_INSTRUCTION_PREAMBLE \
680 /* branch-and-link-to-noredir *%R12 */ \
683 #define VALGRIND_VEX_INJECT_IR() \
685 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
690 #endif /* PLAT_ppc64le_linux */
692 /* ------------------------- arm-linux ------------------------- */
694 #if defined(PLAT_arm_linux)
698 unsigned int nraddr
; /* where's the code? */
/* arm magic preamble: four rotates of r12 totalling 3+13+29+19 = 64
   bits (== 0 mod 32), a net no-op on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3  ; mov r12, r12, ror #13 \n\t"  \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
706 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
707 _zzq_default, _zzq_request, \
708 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
711 ({volatile unsigned int _zzq_args[6]; \
712 volatile unsigned int _zzq_result; \
713 _zzq_args[0] = (unsigned int)(_zzq_request); \
714 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
715 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
716 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
717 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
718 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
719 __asm__ volatile("mov r3, %1\n\t" /*default*/ \
720 "mov r4, %2\n\t" /*ptr*/ \
721 __SPECIAL_INSTRUCTION_PREAMBLE \
722 /* R3 = client_request ( R4 ) */ \
723 "orr r10, r10, r10\n\t" \
724 "mov %0, r3" /*result*/ \
725 : "=r" (_zzq_result) \
726 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
727 : "cc","memory", "r3", "r4"); \
731 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
732 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
733 unsigned int __addr; \
734 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
735 /* R3 = guest_NRADDR */ \
736 "orr r11, r11, r11\n\t" \
740 : "cc", "memory", "r3" \
742 _zzq_orig->nraddr = __addr; \
/* Asm TEXT fragment: preamble plus the marker the JIT interprets as a
   non-redirected branch-and-link through R4.  The orr of r12 with
   itself is a no-op on a real CPU. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"
750 #define VALGRIND_VEX_INJECT_IR() \
752 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
753 "orr r9, r9, r9\n\t" \
754 : : : "cc", "memory" \
758 #endif /* PLAT_arm_linux */
760 /* ------------------------ arm64-linux ------------------------- */
762 #if defined(PLAT_arm64_linux)
766 unsigned long int nraddr
; /* where's the code? */
/* arm64 magic preamble: four rotates of x12 totalling
   3+13+51+61 = 128 bits (== 0 mod 64), a net no-op on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "ror x12, x12, #3  ;  ror x12, x12, #13 \n\t" \
                     "ror x12, x12, #51 ;  ror x12, x12, #61 \n\t"
774 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
775 _zzq_default, _zzq_request, \
776 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
779 ({volatile unsigned long int _zzq_args[6]; \
780 volatile unsigned long int _zzq_result; \
781 _zzq_args[0] = (unsigned long int)(_zzq_request); \
782 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
783 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
784 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
785 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
786 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
787 __asm__ volatile("mov x3, %1\n\t" /*default*/ \
788 "mov x4, %2\n\t" /*ptr*/ \
789 __SPECIAL_INSTRUCTION_PREAMBLE \
790 /* X3 = client_request ( X4 ) */ \
791 "orr x10, x10, x10\n\t" \
792 "mov %0, x3" /*result*/ \
793 : "=r" (_zzq_result) \
794 : "r" ((unsigned long int)(_zzq_default)), \
795 "r" (&_zzq_args[0]) \
796 : "cc","memory", "x3", "x4"); \
800 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
801 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
802 unsigned long int __addr; \
803 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
804 /* X3 = guest_NRADDR */ \
805 "orr x11, x11, x11\n\t" \
809 : "cc", "memory", "x3" \
811 _zzq_orig->nraddr = __addr; \
/* Asm TEXT fragment: preamble plus the marker for a non-redirected
   branch-and-link through X8.  orr x12,x12,x12 is a real-CPU no-op. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"
819 #define VALGRIND_VEX_INJECT_IR() \
821 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
822 "orr x9, x9, x9\n\t" \
823 : : : "cc", "memory" \
827 #endif /* PLAT_arm64_linux */
829 /* ------------------------ s390x-linux ------------------------ */
831 #if defined(PLAT_s390x_linux)
835 unsigned long int nraddr
; /* where's the code? */
839 /* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
840 * code. This detection is implemented in platform specific toIR.c
841 * (e.g. VEX/priv/guest_s390_decoder.c).
843 #define __SPECIAL_INSTRUCTION_PREAMBLE \
/* s390x: the request type is selected by a distinct
   register-to-itself "lr" move (a hardware no-op) placed after the
   preamble -- see the macros below, which pair the preamble with one
   of these codes. */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE "lr 5,5\n\t"
854 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
855 _zzq_default, _zzq_request, \
856 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
858 ({volatile unsigned long int _zzq_args[6]; \
859 volatile unsigned long int _zzq_result; \
860 _zzq_args[0] = (unsigned long int)(_zzq_request); \
861 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
862 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
863 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
864 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
865 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
866 __asm__ volatile(/* r2 = args */ \
870 __SPECIAL_INSTRUCTION_PREAMBLE \
871 __CLIENT_REQUEST_CODE \
874 : "=d" (_zzq_result) \
875 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
876 : "cc", "2", "3", "memory" \
881 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
882 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
883 volatile unsigned long int __addr; \
884 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
885 __GET_NR_CONTEXT_CODE \
889 : "cc", "3", "memory" \
891 _zzq_orig->nraddr = __addr; \
894 #define VALGRIND_CALL_NOREDIR_R1 \
895 __SPECIAL_INSTRUCTION_PREAMBLE \
898 #define VALGRIND_VEX_INJECT_IR() \
900 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
901 __VEX_INJECT_IR_CODE); \
904 #endif /* PLAT_s390x_linux */
906 /* ------------------------- mips32-linux ---------------- */
908 #if defined(PLAT_mips32_linux)
912 unsigned int nraddr
; /* where's the code? */
920 #define __SPECIAL_INSTRUCTION_PREAMBLE \
921 "srl $0, $0, 13\n\t" \
922 "srl $0, $0, 29\n\t" \
923 "srl $0, $0, 3\n\t" \
926 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
927 _zzq_default, _zzq_request, \
928 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
930 ({ volatile unsigned int _zzq_args[6]; \
931 volatile unsigned int _zzq_result; \
932 _zzq_args[0] = (unsigned int)(_zzq_request); \
933 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
934 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
935 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
936 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
937 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
938 __asm__ volatile("move $11, %1\n\t" /*default*/ \
939 "move $12, %2\n\t" /*ptr*/ \
940 __SPECIAL_INSTRUCTION_PREAMBLE \
941 /* T3 = client_request ( T4 ) */ \
942 "or $13, $13, $13\n\t" \
943 "move %0, $11\n\t" /*result*/ \
944 : "=r" (_zzq_result) \
945 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
946 : "$11", "$12", "memory"); \
950 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
951 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
952 volatile unsigned int __addr; \
953 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
954 /* %t9 = guest_NRADDR */ \
955 "or $14, $14, $14\n\t" \
956 "move %0, $11" /*result*/ \
961 _zzq_orig->nraddr = __addr; \
/* Asm TEXT fragment: preamble plus the marker for a non-redirected
   call through $t9.  "or $15,$15,$15" is a no-op on a real CPU. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%t9 */                      \
                     "or $15, $15, $15\n\t"
969 #define VALGRIND_VEX_INJECT_IR() \
971 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
972 "or $11, $11, $11\n\t" \
977 #endif /* PLAT_mips32_linux */
979 /* ------------------------- mips64-linux ---------------- */
981 #if defined(PLAT_mips64_linux)
985 unsigned long nraddr
; /* where's the code? */
/* mips64 magic preamble: shifts whose destination is $0, the
   hardwired zero register, so each instruction is a guaranteed no-op
   on a real CPU while remaining recognizable to Valgrind's JIT. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"          \
                     "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
997 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
998 _zzq_default, _zzq_request, \
999 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
1001 ({ volatile unsigned long int _zzq_args[6]; \
1002 volatile unsigned long int _zzq_result; \
1003 _zzq_args[0] = (unsigned long int)(_zzq_request); \
1004 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
1005 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
1006 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
1007 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
1008 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
1009 __asm__ volatile("move $11, %1\n\t" /*default*/ \
1010 "move $12, %2\n\t" /*ptr*/ \
1011 __SPECIAL_INSTRUCTION_PREAMBLE \
1012 /* $11 = client_request ( $12 ) */ \
1013 "or $13, $13, $13\n\t" \
1014 "move %0, $11\n\t" /*result*/ \
1015 : "=r" (_zzq_result) \
1016 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
1017 : "$11", "$12", "memory"); \
1021 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
1022 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
1023 volatile unsigned long int __addr; \
1024 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1025 /* $11 = guest_NRADDR */ \
1026 "or $14, $14, $14\n\t" \
1027 "move %0, $11" /*result*/ \
1031 _zzq_orig->nraddr = __addr; \
/* Asm TEXT fragment (mips64): preamble plus the marker for a
   non-redirected call through $25 ($t9); the or is a real-CPU no-op. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir $25 */                       \
                     "or $15, $15, $15\n\t"
1039 #define VALGRIND_VEX_INJECT_IR() \
1041 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1042 "or $11, $11, $11\n\t" \
1046 #endif /* PLAT_mips64_linux */
1048 /* Insert assembly code for other platforms here... */
1050 #endif /* NVALGRIND */
1053 /* ------------------------------------------------------------------ */
1054 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1055 /* ugly. It's the least-worst tradeoff I can think of. */
1056 /* ------------------------------------------------------------------ */
1058 /* This section defines magic (a.k.a appalling-hack) macros for doing
1059 guaranteed-no-redirection macros, so as to get from function
1060 wrappers to the functions they are wrapping. The whole point is to
1061 construct standard call sequences, but to do the call itself with a
1062 special no-redirect call pseudo-instruction that the JIT
1063 understands and handles specially. This section is long and
1064 repetitious, and I can't see a way to make it shorter.
1066 The naming scheme is as follows:
1068 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1070 'W' stands for "word" and 'v' for "void". Hence there are
1071 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1072 and for each, the possibility of returning a word-typed result, or
1076 /* Use these to write the name of your wrapper. NOTE: duplicates
1077 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
1078 the default behaviour equivalance class tag "0000" into the name.
1079 See pub_tool_redir.h for details -- normally you don't need to
1080 think about this, though. */
1082 /* Use an extra level of macroisation so as to ensure the soname/fnname
1083 args are fully macro-expanded before pasting them together. */
1084 #define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
/* Build the mangled name of a wrapper function: prefix "_vgw", the
   default behaviour-equivalence tag "0000", and the "ZU" encoding
   marker, then the soname and function name.  See pub_tool_redir.h
   for the full naming scheme. */
1086 #define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
1087 VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)
/* As I_WRAP_SONAME_FNNAME_ZU, but with the "ZZ" encoding marker --
   per pub_tool_redir.h this selects a different Z-encoding of the
   soname/fnname pair (see that header for details). */
1089 #define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
1090 VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1092 /* Use this macro from within a wrapper function to collect the
1093 context (address and possibly other info) of the original function.
1094 Once you have that you can then use it in one of the CALL_FN_
1095 macros. The type of the argument _lval is OrigFn. */
1096 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
1098 /* Also provide end-user facilities for function replacement, rather
1099 than wrapping. A replacement function differs from a wrapper in
1100 that it has no way to get hold of the original function being
1101 called, and hence no way to call onwards to it. In a replacement
1102 function, VALGRIND_GET_ORIG_FN always returns zero. */
/* Build the mangled name of a *replacement* function: identical to
   I_WRAP_SONAME_FNNAME_ZU except for the "_vgr" prefix ('r' for
   replace rather than 'w' for wrap).  In a replacement,
   VALGRIND_GET_ORIG_FN always returns zero (see comment above). */
1104 #define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname) \
1105 VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)
/* ZZ-encoded variant of I_REPLACE_SONAME_FNNAME_ZU; see
   pub_tool_redir.h for the encoding difference. */
1107 #define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname) \
1108 VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1110 /* Derivatives of the main macros below, for calling functions
/* Call a zero-argument function, discarding its word-sized result. */
#define CALL_FN_v_v(fnptr)                                        \
   do {                                                           \
      volatile unsigned long _discard;                            \
      CALL_FN_W_v(_discard,fnptr);                                \
   } while (0)
/* Call a one-argument function, discarding its word-sized result. */
#define CALL_FN_v_W(fnptr, arg1)                                  \
   do {                                                           \
      volatile unsigned long _discard;                            \
      CALL_FN_W_W(_discard,fnptr,arg1);                           \
   } while (0)
/* Call a two-argument function, discarding its word-sized result. */
#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do {                                                           \
      volatile unsigned long _discard;                            \
      CALL_FN_W_WW(_discard,fnptr,arg1,arg2);                     \
   } while (0)
/* Call a three-argument function, discarding its word-sized result. */
#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do {                                                           \
      volatile unsigned long _discard;                            \
      CALL_FN_W_WWW(_discard,fnptr,arg1,arg2,arg3);               \
   } while (0)
/* Call a four-argument function, discarding its word-sized result. */
#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do {                                                           \
      volatile unsigned long _discard;                            \
      CALL_FN_W_WWWW(_discard,fnptr,arg1,arg2,arg3,arg4);         \
   } while (0)
/* Call a five-argument function, discarding its word-sized result. */
#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do {                                                           \
      volatile unsigned long _discard;                            \
      CALL_FN_W_5W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5);      \
   } while (0)
/* Call a six-argument function, discarding its word-sized result. */
#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do {                                                           \
      volatile unsigned long _discard;                            \
      CALL_FN_W_6W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); \
   } while (0)
/* Call a seven-argument function, discarding its word-sized result. */
#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do {                                                           \
      volatile unsigned long _discard;                            \
      CALL_FN_W_7W(_discard,fnptr,                                \
                   arg1,arg2,arg3,arg4,arg5,arg6,arg7);           \
   } while (0)
1145 /* ----------------- x86-{linux,darwin,solaris} ---------------- */
1147 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
1148 || defined(PLAT_x86_solaris)
1150 /* These regs are trashed by the hidden call. No need to mention eax
1151 as gcc can already see that, plus causes gcc to bomb. */
1152 #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
1154 /* Macros to save and align the stack before making a function
1155 call and restore it afterwards as gcc may not keep the stack
1156 pointer aligned if it doesn't realise calls are being made
1157 to other functions. */
/* Save the current stack pointer in %edi (which is why "edi" appears
   in every clobber list below) and round %esp down to a 16-byte
   boundary. */
1159 #define VALGRIND_ALIGN_STACK \
1160 "movl %%esp,%%edi\n\t" \
1161 "andl $0xfffffff0,%%esp\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the %esp saved in %edi.  This
   also discards anything pushed for the call's stacked arguments. */
1162 #define VALGRIND_RESTORE_STACK \
1163 "movl %%edi,%%esp\n\t"
1165 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1168 #define CALL_FN_W_v(lval, orig) \
1170 volatile OrigFn _orig = (orig); \
1171 volatile unsigned long _argvec[1]; \
1172 volatile unsigned long _res; \
1173 _argvec[0] = (unsigned long)_orig.nraddr; \
1175 VALGRIND_ALIGN_STACK \
1176 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1177 VALGRIND_CALL_NOREDIR_EAX \
1178 VALGRIND_RESTORE_STACK \
1179 : /*out*/ "=a" (_res) \
1180 : /*in*/ "a" (&_argvec[0]) \
1181 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1183 lval = (__typeof__(lval)) _res; \
1186 #define CALL_FN_W_W(lval, orig, arg1) \
1188 volatile OrigFn _orig = (orig); \
1189 volatile unsigned long _argvec[2]; \
1190 volatile unsigned long _res; \
1191 _argvec[0] = (unsigned long)_orig.nraddr; \
1192 _argvec[1] = (unsigned long)(arg1); \
1194 VALGRIND_ALIGN_STACK \
1195 "subl $12, %%esp\n\t" \
1196 "pushl 4(%%eax)\n\t" \
1197 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1198 VALGRIND_CALL_NOREDIR_EAX \
1199 VALGRIND_RESTORE_STACK \
1200 : /*out*/ "=a" (_res) \
1201 : /*in*/ "a" (&_argvec[0]) \
1202 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1204 lval = (__typeof__(lval)) _res; \
1207 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1209 volatile OrigFn _orig = (orig); \
1210 volatile unsigned long _argvec[3]; \
1211 volatile unsigned long _res; \
1212 _argvec[0] = (unsigned long)_orig.nraddr; \
1213 _argvec[1] = (unsigned long)(arg1); \
1214 _argvec[2] = (unsigned long)(arg2); \
1216 VALGRIND_ALIGN_STACK \
1217 "subl $8, %%esp\n\t" \
1218 "pushl 8(%%eax)\n\t" \
1219 "pushl 4(%%eax)\n\t" \
1220 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1221 VALGRIND_CALL_NOREDIR_EAX \
1222 VALGRIND_RESTORE_STACK \
1223 : /*out*/ "=a" (_res) \
1224 : /*in*/ "a" (&_argvec[0]) \
1225 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1227 lval = (__typeof__(lval)) _res; \
1230 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1232 volatile OrigFn _orig = (orig); \
1233 volatile unsigned long _argvec[4]; \
1234 volatile unsigned long _res; \
1235 _argvec[0] = (unsigned long)_orig.nraddr; \
1236 _argvec[1] = (unsigned long)(arg1); \
1237 _argvec[2] = (unsigned long)(arg2); \
1238 _argvec[3] = (unsigned long)(arg3); \
1240 VALGRIND_ALIGN_STACK \
1241 "subl $4, %%esp\n\t" \
1242 "pushl 12(%%eax)\n\t" \
1243 "pushl 8(%%eax)\n\t" \
1244 "pushl 4(%%eax)\n\t" \
1245 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1246 VALGRIND_CALL_NOREDIR_EAX \
1247 VALGRIND_RESTORE_STACK \
1248 : /*out*/ "=a" (_res) \
1249 : /*in*/ "a" (&_argvec[0]) \
1250 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1252 lval = (__typeof__(lval)) _res; \
1255 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1257 volatile OrigFn _orig = (orig); \
1258 volatile unsigned long _argvec[5]; \
1259 volatile unsigned long _res; \
1260 _argvec[0] = (unsigned long)_orig.nraddr; \
1261 _argvec[1] = (unsigned long)(arg1); \
1262 _argvec[2] = (unsigned long)(arg2); \
1263 _argvec[3] = (unsigned long)(arg3); \
1264 _argvec[4] = (unsigned long)(arg4); \
1266 VALGRIND_ALIGN_STACK \
1267 "pushl 16(%%eax)\n\t" \
1268 "pushl 12(%%eax)\n\t" \
1269 "pushl 8(%%eax)\n\t" \
1270 "pushl 4(%%eax)\n\t" \
1271 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1272 VALGRIND_CALL_NOREDIR_EAX \
1273 VALGRIND_RESTORE_STACK \
1274 : /*out*/ "=a" (_res) \
1275 : /*in*/ "a" (&_argvec[0]) \
1276 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1278 lval = (__typeof__(lval)) _res; \
1281 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1283 volatile OrigFn _orig = (orig); \
1284 volatile unsigned long _argvec[6]; \
1285 volatile unsigned long _res; \
1286 _argvec[0] = (unsigned long)_orig.nraddr; \
1287 _argvec[1] = (unsigned long)(arg1); \
1288 _argvec[2] = (unsigned long)(arg2); \
1289 _argvec[3] = (unsigned long)(arg3); \
1290 _argvec[4] = (unsigned long)(arg4); \
1291 _argvec[5] = (unsigned long)(arg5); \
1293 VALGRIND_ALIGN_STACK \
1294 "subl $12, %%esp\n\t" \
1295 "pushl 20(%%eax)\n\t" \
1296 "pushl 16(%%eax)\n\t" \
1297 "pushl 12(%%eax)\n\t" \
1298 "pushl 8(%%eax)\n\t" \
1299 "pushl 4(%%eax)\n\t" \
1300 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1301 VALGRIND_CALL_NOREDIR_EAX \
1302 VALGRIND_RESTORE_STACK \
1303 : /*out*/ "=a" (_res) \
1304 : /*in*/ "a" (&_argvec[0]) \
1305 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1307 lval = (__typeof__(lval)) _res; \
1310 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1312 volatile OrigFn _orig = (orig); \
1313 volatile unsigned long _argvec[7]; \
1314 volatile unsigned long _res; \
1315 _argvec[0] = (unsigned long)_orig.nraddr; \
1316 _argvec[1] = (unsigned long)(arg1); \
1317 _argvec[2] = (unsigned long)(arg2); \
1318 _argvec[3] = (unsigned long)(arg3); \
1319 _argvec[4] = (unsigned long)(arg4); \
1320 _argvec[5] = (unsigned long)(arg5); \
1321 _argvec[6] = (unsigned long)(arg6); \
1323 VALGRIND_ALIGN_STACK \
1324 "subl $8, %%esp\n\t" \
1325 "pushl 24(%%eax)\n\t" \
1326 "pushl 20(%%eax)\n\t" \
1327 "pushl 16(%%eax)\n\t" \
1328 "pushl 12(%%eax)\n\t" \
1329 "pushl 8(%%eax)\n\t" \
1330 "pushl 4(%%eax)\n\t" \
1331 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1332 VALGRIND_CALL_NOREDIR_EAX \
1333 VALGRIND_RESTORE_STACK \
1334 : /*out*/ "=a" (_res) \
1335 : /*in*/ "a" (&_argvec[0]) \
1336 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1338 lval = (__typeof__(lval)) _res; \
1341 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1344 volatile OrigFn _orig = (orig); \
1345 volatile unsigned long _argvec[8]; \
1346 volatile unsigned long _res; \
1347 _argvec[0] = (unsigned long)_orig.nraddr; \
1348 _argvec[1] = (unsigned long)(arg1); \
1349 _argvec[2] = (unsigned long)(arg2); \
1350 _argvec[3] = (unsigned long)(arg3); \
1351 _argvec[4] = (unsigned long)(arg4); \
1352 _argvec[5] = (unsigned long)(arg5); \
1353 _argvec[6] = (unsigned long)(arg6); \
1354 _argvec[7] = (unsigned long)(arg7); \
1356 VALGRIND_ALIGN_STACK \
1357 "subl $4, %%esp\n\t" \
1358 "pushl 28(%%eax)\n\t" \
1359 "pushl 24(%%eax)\n\t" \
1360 "pushl 20(%%eax)\n\t" \
1361 "pushl 16(%%eax)\n\t" \
1362 "pushl 12(%%eax)\n\t" \
1363 "pushl 8(%%eax)\n\t" \
1364 "pushl 4(%%eax)\n\t" \
1365 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1366 VALGRIND_CALL_NOREDIR_EAX \
1367 VALGRIND_RESTORE_STACK \
1368 : /*out*/ "=a" (_res) \
1369 : /*in*/ "a" (&_argvec[0]) \
1370 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1372 lval = (__typeof__(lval)) _res; \
1375 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1378 volatile OrigFn _orig = (orig); \
1379 volatile unsigned long _argvec[9]; \
1380 volatile unsigned long _res; \
1381 _argvec[0] = (unsigned long)_orig.nraddr; \
1382 _argvec[1] = (unsigned long)(arg1); \
1383 _argvec[2] = (unsigned long)(arg2); \
1384 _argvec[3] = (unsigned long)(arg3); \
1385 _argvec[4] = (unsigned long)(arg4); \
1386 _argvec[5] = (unsigned long)(arg5); \
1387 _argvec[6] = (unsigned long)(arg6); \
1388 _argvec[7] = (unsigned long)(arg7); \
1389 _argvec[8] = (unsigned long)(arg8); \
1391 VALGRIND_ALIGN_STACK \
1392 "pushl 32(%%eax)\n\t" \
1393 "pushl 28(%%eax)\n\t" \
1394 "pushl 24(%%eax)\n\t" \
1395 "pushl 20(%%eax)\n\t" \
1396 "pushl 16(%%eax)\n\t" \
1397 "pushl 12(%%eax)\n\t" \
1398 "pushl 8(%%eax)\n\t" \
1399 "pushl 4(%%eax)\n\t" \
1400 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1401 VALGRIND_CALL_NOREDIR_EAX \
1402 VALGRIND_RESTORE_STACK \
1403 : /*out*/ "=a" (_res) \
1404 : /*in*/ "a" (&_argvec[0]) \
1405 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1407 lval = (__typeof__(lval)) _res; \
1410 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1413 volatile OrigFn _orig = (orig); \
1414 volatile unsigned long _argvec[10]; \
1415 volatile unsigned long _res; \
1416 _argvec[0] = (unsigned long)_orig.nraddr; \
1417 _argvec[1] = (unsigned long)(arg1); \
1418 _argvec[2] = (unsigned long)(arg2); \
1419 _argvec[3] = (unsigned long)(arg3); \
1420 _argvec[4] = (unsigned long)(arg4); \
1421 _argvec[5] = (unsigned long)(arg5); \
1422 _argvec[6] = (unsigned long)(arg6); \
1423 _argvec[7] = (unsigned long)(arg7); \
1424 _argvec[8] = (unsigned long)(arg8); \
1425 _argvec[9] = (unsigned long)(arg9); \
1427 VALGRIND_ALIGN_STACK \
1428 "subl $12, %%esp\n\t" \
1429 "pushl 36(%%eax)\n\t" \
1430 "pushl 32(%%eax)\n\t" \
1431 "pushl 28(%%eax)\n\t" \
1432 "pushl 24(%%eax)\n\t" \
1433 "pushl 20(%%eax)\n\t" \
1434 "pushl 16(%%eax)\n\t" \
1435 "pushl 12(%%eax)\n\t" \
1436 "pushl 8(%%eax)\n\t" \
1437 "pushl 4(%%eax)\n\t" \
1438 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1439 VALGRIND_CALL_NOREDIR_EAX \
1440 VALGRIND_RESTORE_STACK \
1441 : /*out*/ "=a" (_res) \
1442 : /*in*/ "a" (&_argvec[0]) \
1443 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1445 lval = (__typeof__(lval)) _res; \
1448 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1449 arg7,arg8,arg9,arg10) \
1451 volatile OrigFn _orig = (orig); \
1452 volatile unsigned long _argvec[11]; \
1453 volatile unsigned long _res; \
1454 _argvec[0] = (unsigned long)_orig.nraddr; \
1455 _argvec[1] = (unsigned long)(arg1); \
1456 _argvec[2] = (unsigned long)(arg2); \
1457 _argvec[3] = (unsigned long)(arg3); \
1458 _argvec[4] = (unsigned long)(arg4); \
1459 _argvec[5] = (unsigned long)(arg5); \
1460 _argvec[6] = (unsigned long)(arg6); \
1461 _argvec[7] = (unsigned long)(arg7); \
1462 _argvec[8] = (unsigned long)(arg8); \
1463 _argvec[9] = (unsigned long)(arg9); \
1464 _argvec[10] = (unsigned long)(arg10); \
1466 VALGRIND_ALIGN_STACK \
1467 "subl $8, %%esp\n\t" \
1468 "pushl 40(%%eax)\n\t" \
1469 "pushl 36(%%eax)\n\t" \
1470 "pushl 32(%%eax)\n\t" \
1471 "pushl 28(%%eax)\n\t" \
1472 "pushl 24(%%eax)\n\t" \
1473 "pushl 20(%%eax)\n\t" \
1474 "pushl 16(%%eax)\n\t" \
1475 "pushl 12(%%eax)\n\t" \
1476 "pushl 8(%%eax)\n\t" \
1477 "pushl 4(%%eax)\n\t" \
1478 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1479 VALGRIND_CALL_NOREDIR_EAX \
1480 VALGRIND_RESTORE_STACK \
1481 : /*out*/ "=a" (_res) \
1482 : /*in*/ "a" (&_argvec[0]) \
1483 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1485 lval = (__typeof__(lval)) _res; \
1488 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1489 arg6,arg7,arg8,arg9,arg10, \
1492 volatile OrigFn _orig = (orig); \
1493 volatile unsigned long _argvec[12]; \
1494 volatile unsigned long _res; \
1495 _argvec[0] = (unsigned long)_orig.nraddr; \
1496 _argvec[1] = (unsigned long)(arg1); \
1497 _argvec[2] = (unsigned long)(arg2); \
1498 _argvec[3] = (unsigned long)(arg3); \
1499 _argvec[4] = (unsigned long)(arg4); \
1500 _argvec[5] = (unsigned long)(arg5); \
1501 _argvec[6] = (unsigned long)(arg6); \
1502 _argvec[7] = (unsigned long)(arg7); \
1503 _argvec[8] = (unsigned long)(arg8); \
1504 _argvec[9] = (unsigned long)(arg9); \
1505 _argvec[10] = (unsigned long)(arg10); \
1506 _argvec[11] = (unsigned long)(arg11); \
1508 VALGRIND_ALIGN_STACK \
1509 "subl $4, %%esp\n\t" \
1510 "pushl 44(%%eax)\n\t" \
1511 "pushl 40(%%eax)\n\t" \
1512 "pushl 36(%%eax)\n\t" \
1513 "pushl 32(%%eax)\n\t" \
1514 "pushl 28(%%eax)\n\t" \
1515 "pushl 24(%%eax)\n\t" \
1516 "pushl 20(%%eax)\n\t" \
1517 "pushl 16(%%eax)\n\t" \
1518 "pushl 12(%%eax)\n\t" \
1519 "pushl 8(%%eax)\n\t" \
1520 "pushl 4(%%eax)\n\t" \
1521 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1522 VALGRIND_CALL_NOREDIR_EAX \
1523 VALGRIND_RESTORE_STACK \
1524 : /*out*/ "=a" (_res) \
1525 : /*in*/ "a" (&_argvec[0]) \
1526 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1528 lval = (__typeof__(lval)) _res; \
1531 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1532 arg6,arg7,arg8,arg9,arg10, \
1535 volatile OrigFn _orig = (orig); \
1536 volatile unsigned long _argvec[13]; \
1537 volatile unsigned long _res; \
1538 _argvec[0] = (unsigned long)_orig.nraddr; \
1539 _argvec[1] = (unsigned long)(arg1); \
1540 _argvec[2] = (unsigned long)(arg2); \
1541 _argvec[3] = (unsigned long)(arg3); \
1542 _argvec[4] = (unsigned long)(arg4); \
1543 _argvec[5] = (unsigned long)(arg5); \
1544 _argvec[6] = (unsigned long)(arg6); \
1545 _argvec[7] = (unsigned long)(arg7); \
1546 _argvec[8] = (unsigned long)(arg8); \
1547 _argvec[9] = (unsigned long)(arg9); \
1548 _argvec[10] = (unsigned long)(arg10); \
1549 _argvec[11] = (unsigned long)(arg11); \
1550 _argvec[12] = (unsigned long)(arg12); \
1552 VALGRIND_ALIGN_STACK \
1553 "pushl 48(%%eax)\n\t" \
1554 "pushl 44(%%eax)\n\t" \
1555 "pushl 40(%%eax)\n\t" \
1556 "pushl 36(%%eax)\n\t" \
1557 "pushl 32(%%eax)\n\t" \
1558 "pushl 28(%%eax)\n\t" \
1559 "pushl 24(%%eax)\n\t" \
1560 "pushl 20(%%eax)\n\t" \
1561 "pushl 16(%%eax)\n\t" \
1562 "pushl 12(%%eax)\n\t" \
1563 "pushl 8(%%eax)\n\t" \
1564 "pushl 4(%%eax)\n\t" \
1565 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1566 VALGRIND_CALL_NOREDIR_EAX \
1567 VALGRIND_RESTORE_STACK \
1568 : /*out*/ "=a" (_res) \
1569 : /*in*/ "a" (&_argvec[0]) \
1570 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1572 lval = (__typeof__(lval)) _res; \
1575 #endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris */
1577 /* ---------------- amd64-{linux,darwin,solaris} --------------- */
1579 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1580 || defined(PLAT_amd64_solaris)
1582 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1584 /* These regs are trashed by the hidden call. */
/* Clobber-list fragment naming the amd64 caller-saved registers
   trashed by the hidden call.  %rax is deliberately omitted: it is
   already the asm output operand ("=a") in the macros below. */
1585 #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1586 "rdi", "r8", "r9", "r10", "r11"
1588 /* This is all pretty complex. It's so as to make stack unwinding
1589 work reliably. See bug 243270. The basic problem is the sub and
1590 add of 128 of %rsp in all of the following macros. If gcc believes
1591 the CFA is in %rsp, then unwinding may fail, because what's at the
1592 CFA is not what gcc "expected" when it constructs the CFIs for the
1593 places where the macros are instantiated.
1595 But we can't just add a CFI annotation to increase the CFA offset
1596 by 128, to match the sub of 128 from %rsp, because we don't know
1597 whether gcc has chosen %rsp as the CFA at that point, or whether it
1598 has chosen some other register (eg, %rbp). In the latter case,
1599 adding a CFI annotation to change the CFA offset is simply wrong.
1601 So the solution is to get hold of the CFA using
1602 __builtin_dwarf_cfa(), put it in a known register, and add a
1603 CFI annotation to say what the register is. We choose %rbp for
1604 this (perhaps perversely), because:
1606 (1) %rbp is already subject to unwinding. If a new register was
1607 chosen then the unwinder would have to unwind it in all stack
1608 traces, which is expensive, and
1610 (2) %rbp is already subject to precise exception updates in the
1611 JIT. If a new register was chosen, we'd have to have precise
1612 exceptions for it too, which reduces performance of the
1615 However .. one extra complication. We can't just whack the result
1616 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1617 list of trashed registers at the end of the inline assembly
1618 fragments; gcc won't allow %rbp to appear in that list. Hence
1619 instead we need to stash %rbp in %r15 for the duration of the asm,
1620 and say that %r15 is trashed instead. gcc seems happy to go with
1623 Oh .. and this all needs to be conditionalised so that it is
1624 unchanged from before this commit, when compiled with older gccs
1625 that don't support __builtin_dwarf_cfa. Furthermore, since
1626 this header file is freestanding, it has to be independent of
1627 config.h, and so the following conditionalisation cannot depend on
1628 configure time checks.
1630 Although it's not clear from
1631 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1632 this expression excludes Darwin.
1633 .cfi directives in Darwin assembly appear to be completely
1634 different and I haven't investigated how they work.
1636 For even more entertainment value, note we have to use the
1637 completely undocumented __builtin_dwarf_cfa(), which appears to
1638 really compute the CFA, whereas __builtin_frame_address(0) claims
1639 to but actually doesn't. See
1640 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1642 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* Extra asm input operand: the current CFA from the undocumented
   __builtin_dwarf_cfa() (see the long comment above), placed in some
   register.  It is referenced as %2 by VALGRIND_CFI_PROLOGUE.  Note
   the leading comma: this text is appended directly after the
   "a"(&_argvec[0]) input operand in the macros below. */
1643 # define __FRAME_POINTER \
1644 ,"r"(__builtin_dwarf_cfa())
/* Stash %rbp in %r15 -- gcc refuses to let %rbp appear in the clobber
   list, so %r15 is listed as trashed instead (see comment above) --
   then load the CFA (asm operand %2, supplied by __FRAME_POINTER)
   into %rbp and tell the unwinder, via .cfi directives, that the CFA
   now lives in %rbp at offset 0.  .cfi_remember_state pairs with the
   .cfi_restore_state in VALGRIND_CFI_EPILOGUE. */
1645 # define VALGRIND_CFI_PROLOGUE \
1646 "movq %%rbp, %%r15\n\t" \
1647 "movq %2, %%rbp\n\t" \
1648 ".cfi_remember_state\n\t" \
1649 ".cfi_def_cfa rbp, 0\n\t"
/* Undo VALGRIND_CFI_PROLOGUE: restore %rbp from %r15 and revert to
   the CFI state saved by .cfi_remember_state. */
1650 # define VALGRIND_CFI_EPILOGUE \
1651 "movq %%r15, %%rbp\n\t" \
1652 ".cfi_restore_state\n\t"
/* Fallback definitions -- presumably the #else branch of the
   __GCC_HAVE_DWARF2_CFI_ASM test above (the #else itself is elided
   from this excerpt; verify against the full file): no CFA operand
   and no CFI annotations are emitted on toolchains without DWARF2
   CFI asm support. */
1654 # define __FRAME_POINTER
1655 # define VALGRIND_CFI_PROLOGUE
1656 # define VALGRIND_CFI_EPILOGUE
1659 /* Macros to save and align the stack before making a function
1660 call and restore it afterwards as gcc may not keep the stack
1661 pointer aligned if it doesn't realise calls are being made
1662 to other functions. */
/* Save the current stack pointer in %r14 (which is why "r14" appears
   in every clobber list below) and round %rsp down to a 16-byte
   boundary. */
1664 #define VALGRIND_ALIGN_STACK \
1665 "movq %%rsp,%%r14\n\t" \
1666 "andq $0xfffffffffffffff0,%%rsp\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the %rsp saved in %r14.  This
   also discards the 128-byte redzone skip and any pushed stack
   arguments. */
1667 #define VALGRIND_RESTORE_STACK \
1668 "movq %%r14,%%rsp\n\t"
1670 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1673 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1674 macros. In order not to trash the stack redzone, we need to drop
1675 %rsp by 128 before the hidden call, and restore afterwards. The
1676 nastyness is that it is only by luck that the stack still appears
1677 to be unwindable during the hidden call - since then the behaviour
1678 of any routine using this macro does not match what the CFI data
1681 Why is this important? Imagine that a wrapper has a stack
1682 allocated local, and passes to the hidden call, a pointer to it.
1683 Because gcc does not know about the hidden call, it may allocate
1684 that local in the redzone. Unfortunately the hidden call may then
1685 trash it before it comes to use it. So we must step clear of the
1686 redzone, for the duration of the hidden call, to make it safe.
1688 Probably the same problem afflicts the other redzone-style ABIs too
1689 (ppc64-linux); but for those, the stack is
1690 self describing (none of this CFI nonsense) so at least messing
1691 with the stack pointer doesn't give a danger of non-unwindable
1694 #define CALL_FN_W_v(lval, orig) \
1696 volatile OrigFn _orig = (orig); \
1697 volatile unsigned long _argvec[1]; \
1698 volatile unsigned long _res; \
1699 _argvec[0] = (unsigned long)_orig.nraddr; \
1701 VALGRIND_CFI_PROLOGUE \
1702 VALGRIND_ALIGN_STACK \
1703 "subq $128,%%rsp\n\t" \
1704 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1705 VALGRIND_CALL_NOREDIR_RAX \
1706 VALGRIND_RESTORE_STACK \
1707 VALGRIND_CFI_EPILOGUE \
1708 : /*out*/ "=a" (_res) \
1709 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1710 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1712 lval = (__typeof__(lval)) _res; \
1715 #define CALL_FN_W_W(lval, orig, arg1) \
1717 volatile OrigFn _orig = (orig); \
1718 volatile unsigned long _argvec[2]; \
1719 volatile unsigned long _res; \
1720 _argvec[0] = (unsigned long)_orig.nraddr; \
1721 _argvec[1] = (unsigned long)(arg1); \
1723 VALGRIND_CFI_PROLOGUE \
1724 VALGRIND_ALIGN_STACK \
1725 "subq $128,%%rsp\n\t" \
1726 "movq 8(%%rax), %%rdi\n\t" \
1727 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1728 VALGRIND_CALL_NOREDIR_RAX \
1729 VALGRIND_RESTORE_STACK \
1730 VALGRIND_CFI_EPILOGUE \
1731 : /*out*/ "=a" (_res) \
1732 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1733 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1735 lval = (__typeof__(lval)) _res; \
1738 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1740 volatile OrigFn _orig = (orig); \
1741 volatile unsigned long _argvec[3]; \
1742 volatile unsigned long _res; \
1743 _argvec[0] = (unsigned long)_orig.nraddr; \
1744 _argvec[1] = (unsigned long)(arg1); \
1745 _argvec[2] = (unsigned long)(arg2); \
1747 VALGRIND_CFI_PROLOGUE \
1748 VALGRIND_ALIGN_STACK \
1749 "subq $128,%%rsp\n\t" \
1750 "movq 16(%%rax), %%rsi\n\t" \
1751 "movq 8(%%rax), %%rdi\n\t" \
1752 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1753 VALGRIND_CALL_NOREDIR_RAX \
1754 VALGRIND_RESTORE_STACK \
1755 VALGRIND_CFI_EPILOGUE \
1756 : /*out*/ "=a" (_res) \
1757 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1758 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1760 lval = (__typeof__(lval)) _res; \
1763 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1765 volatile OrigFn _orig = (orig); \
1766 volatile unsigned long _argvec[4]; \
1767 volatile unsigned long _res; \
1768 _argvec[0] = (unsigned long)_orig.nraddr; \
1769 _argvec[1] = (unsigned long)(arg1); \
1770 _argvec[2] = (unsigned long)(arg2); \
1771 _argvec[3] = (unsigned long)(arg3); \
1773 VALGRIND_CFI_PROLOGUE \
1774 VALGRIND_ALIGN_STACK \
1775 "subq $128,%%rsp\n\t" \
1776 "movq 24(%%rax), %%rdx\n\t" \
1777 "movq 16(%%rax), %%rsi\n\t" \
1778 "movq 8(%%rax), %%rdi\n\t" \
1779 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1780 VALGRIND_CALL_NOREDIR_RAX \
1781 VALGRIND_RESTORE_STACK \
1782 VALGRIND_CFI_EPILOGUE \
1783 : /*out*/ "=a" (_res) \
1784 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1785 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1787 lval = (__typeof__(lval)) _res; \
1790 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1792 volatile OrigFn _orig = (orig); \
1793 volatile unsigned long _argvec[5]; \
1794 volatile unsigned long _res; \
1795 _argvec[0] = (unsigned long)_orig.nraddr; \
1796 _argvec[1] = (unsigned long)(arg1); \
1797 _argvec[2] = (unsigned long)(arg2); \
1798 _argvec[3] = (unsigned long)(arg3); \
1799 _argvec[4] = (unsigned long)(arg4); \
1801 VALGRIND_CFI_PROLOGUE \
1802 VALGRIND_ALIGN_STACK \
1803 "subq $128,%%rsp\n\t" \
1804 "movq 32(%%rax), %%rcx\n\t" \
1805 "movq 24(%%rax), %%rdx\n\t" \
1806 "movq 16(%%rax), %%rsi\n\t" \
1807 "movq 8(%%rax), %%rdi\n\t" \
1808 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1809 VALGRIND_CALL_NOREDIR_RAX \
1810 VALGRIND_RESTORE_STACK \
1811 VALGRIND_CFI_EPILOGUE \
1812 : /*out*/ "=a" (_res) \
1813 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1814 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1816 lval = (__typeof__(lval)) _res; \
1819 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1821 volatile OrigFn _orig = (orig); \
1822 volatile unsigned long _argvec[6]; \
1823 volatile unsigned long _res; \
1824 _argvec[0] = (unsigned long)_orig.nraddr; \
1825 _argvec[1] = (unsigned long)(arg1); \
1826 _argvec[2] = (unsigned long)(arg2); \
1827 _argvec[3] = (unsigned long)(arg3); \
1828 _argvec[4] = (unsigned long)(arg4); \
1829 _argvec[5] = (unsigned long)(arg5); \
1831 VALGRIND_CFI_PROLOGUE \
1832 VALGRIND_ALIGN_STACK \
1833 "subq $128,%%rsp\n\t" \
1834 "movq 40(%%rax), %%r8\n\t" \
1835 "movq 32(%%rax), %%rcx\n\t" \
1836 "movq 24(%%rax), %%rdx\n\t" \
1837 "movq 16(%%rax), %%rsi\n\t" \
1838 "movq 8(%%rax), %%rdi\n\t" \
1839 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1840 VALGRIND_CALL_NOREDIR_RAX \
1841 VALGRIND_RESTORE_STACK \
1842 VALGRIND_CFI_EPILOGUE \
1843 : /*out*/ "=a" (_res) \
1844 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1845 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1847 lval = (__typeof__(lval)) _res; \
1850 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1852 volatile OrigFn _orig = (orig); \
1853 volatile unsigned long _argvec[7]; \
1854 volatile unsigned long _res; \
1855 _argvec[0] = (unsigned long)_orig.nraddr; \
1856 _argvec[1] = (unsigned long)(arg1); \
1857 _argvec[2] = (unsigned long)(arg2); \
1858 _argvec[3] = (unsigned long)(arg3); \
1859 _argvec[4] = (unsigned long)(arg4); \
1860 _argvec[5] = (unsigned long)(arg5); \
1861 _argvec[6] = (unsigned long)(arg6); \
1863 VALGRIND_CFI_PROLOGUE \
1864 VALGRIND_ALIGN_STACK \
1865 "subq $128,%%rsp\n\t" \
1866 "movq 48(%%rax), %%r9\n\t" \
1867 "movq 40(%%rax), %%r8\n\t" \
1868 "movq 32(%%rax), %%rcx\n\t" \
1869 "movq 24(%%rax), %%rdx\n\t" \
1870 "movq 16(%%rax), %%rsi\n\t" \
1871 "movq 8(%%rax), %%rdi\n\t" \
1872 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1873 VALGRIND_CALL_NOREDIR_RAX \
1874 VALGRIND_RESTORE_STACK \
1875 VALGRIND_CFI_EPILOGUE \
1876 : /*out*/ "=a" (_res) \
1877 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1878 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1880 lval = (__typeof__(lval)) _res; \
1883 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1886 volatile OrigFn _orig = (orig); \
1887 volatile unsigned long _argvec[8]; \
1888 volatile unsigned long _res; \
1889 _argvec[0] = (unsigned long)_orig.nraddr; \
1890 _argvec[1] = (unsigned long)(arg1); \
1891 _argvec[2] = (unsigned long)(arg2); \
1892 _argvec[3] = (unsigned long)(arg3); \
1893 _argvec[4] = (unsigned long)(arg4); \
1894 _argvec[5] = (unsigned long)(arg5); \
1895 _argvec[6] = (unsigned long)(arg6); \
1896 _argvec[7] = (unsigned long)(arg7); \
1898 VALGRIND_CFI_PROLOGUE \
1899 VALGRIND_ALIGN_STACK \
1900 "subq $136,%%rsp\n\t" \
1901 "pushq 56(%%rax)\n\t" \
1902 "movq 48(%%rax), %%r9\n\t" \
1903 "movq 40(%%rax), %%r8\n\t" \
1904 "movq 32(%%rax), %%rcx\n\t" \
1905 "movq 24(%%rax), %%rdx\n\t" \
1906 "movq 16(%%rax), %%rsi\n\t" \
1907 "movq 8(%%rax), %%rdi\n\t" \
1908 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1909 VALGRIND_CALL_NOREDIR_RAX \
1910 VALGRIND_RESTORE_STACK \
1911 VALGRIND_CFI_EPILOGUE \
1912 : /*out*/ "=a" (_res) \
1913 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1914 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1916 lval = (__typeof__(lval)) _res; \
1919 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1922 volatile OrigFn _orig = (orig); \
1923 volatile unsigned long _argvec[9]; \
1924 volatile unsigned long _res; \
1925 _argvec[0] = (unsigned long)_orig.nraddr; \
1926 _argvec[1] = (unsigned long)(arg1); \
1927 _argvec[2] = (unsigned long)(arg2); \
1928 _argvec[3] = (unsigned long)(arg3); \
1929 _argvec[4] = (unsigned long)(arg4); \
1930 _argvec[5] = (unsigned long)(arg5); \
1931 _argvec[6] = (unsigned long)(arg6); \
1932 _argvec[7] = (unsigned long)(arg7); \
1933 _argvec[8] = (unsigned long)(arg8); \
1935 VALGRIND_CFI_PROLOGUE \
1936 VALGRIND_ALIGN_STACK \
1937 "subq $128,%%rsp\n\t" \
1938 "pushq 64(%%rax)\n\t" \
1939 "pushq 56(%%rax)\n\t" \
1940 "movq 48(%%rax), %%r9\n\t" \
1941 "movq 40(%%rax), %%r8\n\t" \
1942 "movq 32(%%rax), %%rcx\n\t" \
1943 "movq 24(%%rax), %%rdx\n\t" \
1944 "movq 16(%%rax), %%rsi\n\t" \
1945 "movq 8(%%rax), %%rdi\n\t" \
1946 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1947 VALGRIND_CALL_NOREDIR_RAX \
1948 VALGRIND_RESTORE_STACK \
1949 VALGRIND_CFI_EPILOGUE \
1950 : /*out*/ "=a" (_res) \
1951 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1952 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1954 lval = (__typeof__(lval)) _res; \
1957 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1960 volatile OrigFn _orig = (orig); \
1961 volatile unsigned long _argvec[10]; \
1962 volatile unsigned long _res; \
1963 _argvec[0] = (unsigned long)_orig.nraddr; \
1964 _argvec[1] = (unsigned long)(arg1); \
1965 _argvec[2] = (unsigned long)(arg2); \
1966 _argvec[3] = (unsigned long)(arg3); \
1967 _argvec[4] = (unsigned long)(arg4); \
1968 _argvec[5] = (unsigned long)(arg5); \
1969 _argvec[6] = (unsigned long)(arg6); \
1970 _argvec[7] = (unsigned long)(arg7); \
1971 _argvec[8] = (unsigned long)(arg8); \
1972 _argvec[9] = (unsigned long)(arg9); \
1974 VALGRIND_CFI_PROLOGUE \
1975 VALGRIND_ALIGN_STACK \
1976 "subq $136,%%rsp\n\t" \
1977 "pushq 72(%%rax)\n\t" \
1978 "pushq 64(%%rax)\n\t" \
1979 "pushq 56(%%rax)\n\t" \
1980 "movq 48(%%rax), %%r9\n\t" \
1981 "movq 40(%%rax), %%r8\n\t" \
1982 "movq 32(%%rax), %%rcx\n\t" \
1983 "movq 24(%%rax), %%rdx\n\t" \
1984 "movq 16(%%rax), %%rsi\n\t" \
1985 "movq 8(%%rax), %%rdi\n\t" \
1986 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1987 VALGRIND_CALL_NOREDIR_RAX \
1988 VALGRIND_RESTORE_STACK \
1989 VALGRIND_CFI_EPILOGUE \
1990 : /*out*/ "=a" (_res) \
1991 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1992 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1994 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_10W (amd64): as CALL_FN_W_9W but with 10 args -- args
   7..10 go on the stack (four pushes, arg10 first; "subq $128"
   presumably preserves 16-byte alignment with an even push count --
   TODO confirm against upstream).  NOTE(review): gaps in the embedded
   upstream line numbers indicate dropped lines in this extract. */
1997 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1998 arg7,arg8,arg9,arg10) \
2000 volatile OrigFn _orig = (orig); \
2001 volatile unsigned long _argvec[11]; \
2002 volatile unsigned long _res; \
2003 _argvec[0] = (unsigned long)_orig.nraddr; \
2004 _argvec[1] = (unsigned long)(arg1); \
2005 _argvec[2] = (unsigned long)(arg2); \
2006 _argvec[3] = (unsigned long)(arg3); \
2007 _argvec[4] = (unsigned long)(arg4); \
2008 _argvec[5] = (unsigned long)(arg5); \
2009 _argvec[6] = (unsigned long)(arg6); \
2010 _argvec[7] = (unsigned long)(arg7); \
2011 _argvec[8] = (unsigned long)(arg8); \
2012 _argvec[9] = (unsigned long)(arg9); \
2013 _argvec[10] = (unsigned long)(arg10); \
2015 VALGRIND_CFI_PROLOGUE \
2016 VALGRIND_ALIGN_STACK \
2017 "subq $128,%%rsp\n\t" \
2018 "pushq 80(%%rax)\n\t" \
2019 "pushq 72(%%rax)\n\t" \
2020 "pushq 64(%%rax)\n\t" \
2021 "pushq 56(%%rax)\n\t" \
2022 "movq 48(%%rax), %%r9\n\t" \
2023 "movq 40(%%rax), %%r8\n\t" \
2024 "movq 32(%%rax), %%rcx\n\t" \
2025 "movq 24(%%rax), %%rdx\n\t" \
2026 "movq 16(%%rax), %%rsi\n\t" \
2027 "movq 8(%%rax), %%rdi\n\t" \
2028 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2029 VALGRIND_CALL_NOREDIR_RAX \
2030 VALGRIND_RESTORE_STACK \
2031 VALGRIND_CFI_EPILOGUE \
2032 : /*out*/ "=a" (_res) \
2033 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2034 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2036 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_11W (amd64): 11 args; args 7..11 pushed in reverse order
   (five pushes, "subq $136" presumably re-balances 16-byte alignment
   for the odd push count -- TODO confirm against upstream).
   NOTE(review): gaps in the embedded upstream line numbers indicate
   dropped lines in this extract. */
2039 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2040 arg7,arg8,arg9,arg10,arg11) \
2042 volatile OrigFn _orig = (orig); \
2043 volatile unsigned long _argvec[12]; \
2044 volatile unsigned long _res; \
2045 _argvec[0] = (unsigned long)_orig.nraddr; \
2046 _argvec[1] = (unsigned long)(arg1); \
2047 _argvec[2] = (unsigned long)(arg2); \
2048 _argvec[3] = (unsigned long)(arg3); \
2049 _argvec[4] = (unsigned long)(arg4); \
2050 _argvec[5] = (unsigned long)(arg5); \
2051 _argvec[6] = (unsigned long)(arg6); \
2052 _argvec[7] = (unsigned long)(arg7); \
2053 _argvec[8] = (unsigned long)(arg8); \
2054 _argvec[9] = (unsigned long)(arg9); \
2055 _argvec[10] = (unsigned long)(arg10); \
2056 _argvec[11] = (unsigned long)(arg11); \
2058 VALGRIND_CFI_PROLOGUE \
2059 VALGRIND_ALIGN_STACK \
2060 "subq $136,%%rsp\n\t" \
2061 "pushq 88(%%rax)\n\t" \
2062 "pushq 80(%%rax)\n\t" \
2063 "pushq 72(%%rax)\n\t" \
2064 "pushq 64(%%rax)\n\t" \
2065 "pushq 56(%%rax)\n\t" \
2066 "movq 48(%%rax), %%r9\n\t" \
2067 "movq 40(%%rax), %%r8\n\t" \
2068 "movq 32(%%rax), %%rcx\n\t" \
2069 "movq 24(%%rax), %%rdx\n\t" \
2070 "movq 16(%%rax), %%rsi\n\t" \
2071 "movq 8(%%rax), %%rdi\n\t" \
2072 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2073 VALGRIND_CALL_NOREDIR_RAX \
2074 VALGRIND_RESTORE_STACK \
2075 VALGRIND_CFI_EPILOGUE \
2076 : /*out*/ "=a" (_res) \
2077 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2078 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2080 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_12W (amd64): 12 args; args 7..12 pushed in reverse order
   (six pushes, "subq $128" for alignment -- TODO confirm against
   upstream).  NOTE(review): gaps in the embedded upstream line
   numbers indicate dropped lines in this extract. */
2083 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2084 arg7,arg8,arg9,arg10,arg11,arg12) \
2086 volatile OrigFn _orig = (orig); \
2087 volatile unsigned long _argvec[13]; \
2088 volatile unsigned long _res; \
2089 _argvec[0] = (unsigned long)_orig.nraddr; \
2090 _argvec[1] = (unsigned long)(arg1); \
2091 _argvec[2] = (unsigned long)(arg2); \
2092 _argvec[3] = (unsigned long)(arg3); \
2093 _argvec[4] = (unsigned long)(arg4); \
2094 _argvec[5] = (unsigned long)(arg5); \
2095 _argvec[6] = (unsigned long)(arg6); \
2096 _argvec[7] = (unsigned long)(arg7); \
2097 _argvec[8] = (unsigned long)(arg8); \
2098 _argvec[9] = (unsigned long)(arg9); \
2099 _argvec[10] = (unsigned long)(arg10); \
2100 _argvec[11] = (unsigned long)(arg11); \
2101 _argvec[12] = (unsigned long)(arg12); \
2103 VALGRIND_CFI_PROLOGUE \
2104 VALGRIND_ALIGN_STACK \
2105 "subq $128,%%rsp\n\t" \
2106 "pushq 96(%%rax)\n\t" \
2107 "pushq 88(%%rax)\n\t" \
2108 "pushq 80(%%rax)\n\t" \
2109 "pushq 72(%%rax)\n\t" \
2110 "pushq 64(%%rax)\n\t" \
2111 "pushq 56(%%rax)\n\t" \
2112 "movq 48(%%rax), %%r9\n\t" \
2113 "movq 40(%%rax), %%r8\n\t" \
2114 "movq 32(%%rax), %%rcx\n\t" \
2115 "movq 24(%%rax), %%rdx\n\t" \
2116 "movq 16(%%rax), %%rsi\n\t" \
2117 "movq 8(%%rax), %%rdi\n\t" \
2118 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2119 VALGRIND_CALL_NOREDIR_RAX \
2120 VALGRIND_RESTORE_STACK \
2121 VALGRIND_CFI_EPILOGUE \
2122 : /*out*/ "=a" (_res) \
2123 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2124 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2126 lval = (__typeof__(lval)) _res; \
2129 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
2131 /* ------------------------ ppc32-linux ------------------------ */
2133 #if defined(PLAT_ppc32_linux)
2135 /* This is useful for finding out about the on-stack stuff:
2137 extern int f9 ( int,int,int,int,int,int,int,int,int );
2138 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2139 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2140 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2143 return f9(11,22,33,44,55,66,77,88,99);
2146 return f10(11,22,33,44,55,66,77,88,99,110);
2149 return f11(11,22,33,44,55,66,77,88,99,110,121);
2152 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2156 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2158 /* These regs are trashed by the hidden call. */
/* ppc32: registers clobbered by the hidden call -- link/count/FX
   exception registers, all CR fields, and the volatile GPRs.
   NOTE(review): the tail of this list (upstream also names
   r11/r12/r13) is not visible in this extract -- verify against
   upstream valgrind.h. */
2159 #define __CALLER_SAVED_REGS \
2160 "lr", "ctr", "xer", \
2161 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2162 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2165 /* Macros to save and align the stack before making a function
2166 call and restore it afterwards as gcc may not keep the stack
2167 pointer aligned if it doesn't realise calls are being made
2168 to other functions. */
/* rlwinm 1,1,0,0,27 keeps the high 28 bits of r1, i.e. rounds the
   stack pointer down to a 16-byte boundary.  NOTE(review): the
   save-original-r1 step (upstream uses r28, which the CALL_FN_
   macros list as trashed) and the VALGRIND_RESTORE_STACK body are
   not visible in this extract. */
2170 #define VALGRIND_ALIGN_STACK \
2172 "rlwinm 1,1,0,0,27\n\t"
2173 #define VALGRIND_RESTORE_STACK \
2176 /* These CALL_FN_ macros assume that on ppc32-linux,
2177 sizeof(unsigned long) == 4. */
/* CALL_FN_W_v (ppc32): call the original, non-redirected function
   with no arguments.  r11 points at _argvec; the target address at
   0(r11) is loaded into r11 for the NoRedir branch-and-link.
   NOTE(review): gaps in the embedded upstream line numbers indicate
   dropped lines (asm scaffolding) in this extract. */
2179 #define CALL_FN_W_v(lval, orig) \
2181 volatile OrigFn _orig = (orig); \
2182 volatile unsigned long _argvec[1]; \
2183 volatile unsigned long _res; \
2184 _argvec[0] = (unsigned long)_orig.nraddr; \
2186 VALGRIND_ALIGN_STACK \
2188 "lwz 11,0(11)\n\t" /* target->r11 */ \
2189 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2190 VALGRIND_RESTORE_STACK \
2192 : /*out*/ "=r" (_res) \
2193 : /*in*/ "r" (&_argvec[0]) \
2194 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2196 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_W (ppc32): one argument, loaded from _argvec[1]
   (offset 4 off r11) into r3; target from 0(r11).  NOTE(review):
   gaps in the embedded upstream line numbers indicate dropped
   lines in this extract. */
2199 #define CALL_FN_W_W(lval, orig, arg1) \
2201 volatile OrigFn _orig = (orig); \
2202 volatile unsigned long _argvec[2]; \
2203 volatile unsigned long _res; \
2204 _argvec[0] = (unsigned long)_orig.nraddr; \
2205 _argvec[1] = (unsigned long)arg1; \
2207 VALGRIND_ALIGN_STACK \
2209 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2210 "lwz 11,0(11)\n\t" /* target->r11 */ \
2211 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2212 VALGRIND_RESTORE_STACK \
2214 : /*out*/ "=r" (_res) \
2215 : /*in*/ "r" (&_argvec[0]) \
2216 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2218 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_WW (ppc32): two arguments -> r3/r4 from _argvec.
   NOTE(review): the arg2 load ("lwz 4,8(11)" upstream) is not
   visible here -- the gap in embedded upstream line numbers (2233)
   indicates this extract dropped it. */
2221 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2223 volatile OrigFn _orig = (orig); \
2224 volatile unsigned long _argvec[3]; \
2225 volatile unsigned long _res; \
2226 _argvec[0] = (unsigned long)_orig.nraddr; \
2227 _argvec[1] = (unsigned long)arg1; \
2228 _argvec[2] = (unsigned long)arg2; \
2230 VALGRIND_ALIGN_STACK \
2232 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2234 "lwz 11,0(11)\n\t" /* target->r11 */ \
2235 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2236 VALGRIND_RESTORE_STACK \
2238 : /*out*/ "=r" (_res) \
2239 : /*in*/ "r" (&_argvec[0]) \
2240 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2242 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_WWW (ppc32): three arguments -> r3..r5 from _argvec
   (word offsets 4/8/12 off r11).  NOTE(review): the arg2 load is
   not visible here -- gaps in the embedded upstream line numbers
   indicate dropped lines in this extract. */
2245 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2247 volatile OrigFn _orig = (orig); \
2248 volatile unsigned long _argvec[4]; \
2249 volatile unsigned long _res; \
2250 _argvec[0] = (unsigned long)_orig.nraddr; \
2251 _argvec[1] = (unsigned long)arg1; \
2252 _argvec[2] = (unsigned long)arg2; \
2253 _argvec[3] = (unsigned long)arg3; \
2255 VALGRIND_ALIGN_STACK \
2257 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2259 "lwz 5,12(11)\n\t" \
2260 "lwz 11,0(11)\n\t" /* target->r11 */ \
2261 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2262 VALGRIND_RESTORE_STACK \
2264 : /*out*/ "=r" (_res) \
2265 : /*in*/ "r" (&_argvec[0]) \
2266 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2268 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_WWWW (ppc32): four arguments -> r3..r6 from _argvec.
   NOTE(review): gaps in the embedded upstream line numbers
   indicate dropped lines (e.g. the arg2 load) in this extract. */
2271 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2273 volatile OrigFn _orig = (orig); \
2274 volatile unsigned long _argvec[5]; \
2275 volatile unsigned long _res; \
2276 _argvec[0] = (unsigned long)_orig.nraddr; \
2277 _argvec[1] = (unsigned long)arg1; \
2278 _argvec[2] = (unsigned long)arg2; \
2279 _argvec[3] = (unsigned long)arg3; \
2280 _argvec[4] = (unsigned long)arg4; \
2282 VALGRIND_ALIGN_STACK \
2284 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2286 "lwz 5,12(11)\n\t" \
2287 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2288 "lwz 11,0(11)\n\t" /* target->r11 */ \
2289 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2290 VALGRIND_RESTORE_STACK \
2292 : /*out*/ "=r" (_res) \
2293 : /*in*/ "r" (&_argvec[0]) \
2294 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2296 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_5W (ppc32): five arguments -> r3..r7 from _argvec.
   NOTE(review): gaps in the embedded upstream line numbers
   indicate dropped lines in this extract. */
2299 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2301 volatile OrigFn _orig = (orig); \
2302 volatile unsigned long _argvec[6]; \
2303 volatile unsigned long _res; \
2304 _argvec[0] = (unsigned long)_orig.nraddr; \
2305 _argvec[1] = (unsigned long)arg1; \
2306 _argvec[2] = (unsigned long)arg2; \
2307 _argvec[3] = (unsigned long)arg3; \
2308 _argvec[4] = (unsigned long)arg4; \
2309 _argvec[5] = (unsigned long)arg5; \
2311 VALGRIND_ALIGN_STACK \
2313 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2315 "lwz 5,12(11)\n\t" \
2316 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2317 "lwz 7,20(11)\n\t" \
2318 "lwz 11,0(11)\n\t" /* target->r11 */ \
2319 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2320 VALGRIND_RESTORE_STACK \
2322 : /*out*/ "=r" (_res) \
2323 : /*in*/ "r" (&_argvec[0]) \
2324 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2326 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_6W (ppc32): six arguments -> r3..r8 from _argvec.
   NOTE(review): gaps in the embedded upstream line numbers
   indicate dropped lines in this extract. */
2329 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2331 volatile OrigFn _orig = (orig); \
2332 volatile unsigned long _argvec[7]; \
2333 volatile unsigned long _res; \
2334 _argvec[0] = (unsigned long)_orig.nraddr; \
2335 _argvec[1] = (unsigned long)arg1; \
2336 _argvec[2] = (unsigned long)arg2; \
2337 _argvec[3] = (unsigned long)arg3; \
2338 _argvec[4] = (unsigned long)arg4; \
2339 _argvec[5] = (unsigned long)arg5; \
2340 _argvec[6] = (unsigned long)arg6; \
2342 VALGRIND_ALIGN_STACK \
2344 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2346 "lwz 5,12(11)\n\t" \
2347 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2348 "lwz 7,20(11)\n\t" \
2349 "lwz 8,24(11)\n\t" \
2350 "lwz 11,0(11)\n\t" /* target->r11 */ \
2351 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2352 VALGRIND_RESTORE_STACK \
2354 : /*out*/ "=r" (_res) \
2355 : /*in*/ "r" (&_argvec[0]) \
2356 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2358 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_7W (ppc32): seven arguments -> r3..r9 from _argvec.
   NOTE(review): gaps in the embedded upstream line numbers
   indicate dropped lines in this extract. */
2361 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2364 volatile OrigFn _orig = (orig); \
2365 volatile unsigned long _argvec[8]; \
2366 volatile unsigned long _res; \
2367 _argvec[0] = (unsigned long)_orig.nraddr; \
2368 _argvec[1] = (unsigned long)arg1; \
2369 _argvec[2] = (unsigned long)arg2; \
2370 _argvec[3] = (unsigned long)arg3; \
2371 _argvec[4] = (unsigned long)arg4; \
2372 _argvec[5] = (unsigned long)arg5; \
2373 _argvec[6] = (unsigned long)arg6; \
2374 _argvec[7] = (unsigned long)arg7; \
2376 VALGRIND_ALIGN_STACK \
2378 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2380 "lwz 5,12(11)\n\t" \
2381 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2382 "lwz 7,20(11)\n\t" \
2383 "lwz 8,24(11)\n\t" \
2384 "lwz 9,28(11)\n\t" \
2385 "lwz 11,0(11)\n\t" /* target->r11 */ \
2386 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2387 VALGRIND_RESTORE_STACK \
2389 : /*out*/ "=r" (_res) \
2390 : /*in*/ "r" (&_argvec[0]) \
2391 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2393 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_8W (ppc32): eight arguments -> r3..r10, the full GPR
   argument set (see ARGREGS note above).  NOTE(review): gaps in the
   embedded upstream line numbers indicate dropped lines in this
   extract. */
2396 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2399 volatile OrigFn _orig = (orig); \
2400 volatile unsigned long _argvec[9]; \
2401 volatile unsigned long _res; \
2402 _argvec[0] = (unsigned long)_orig.nraddr; \
2403 _argvec[1] = (unsigned long)arg1; \
2404 _argvec[2] = (unsigned long)arg2; \
2405 _argvec[3] = (unsigned long)arg3; \
2406 _argvec[4] = (unsigned long)arg4; \
2407 _argvec[5] = (unsigned long)arg5; \
2408 _argvec[6] = (unsigned long)arg6; \
2409 _argvec[7] = (unsigned long)arg7; \
2410 _argvec[8] = (unsigned long)arg8; \
2412 VALGRIND_ALIGN_STACK \
2414 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2416 "lwz 5,12(11)\n\t" \
2417 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2418 "lwz 7,20(11)\n\t" \
2419 "lwz 8,24(11)\n\t" \
2420 "lwz 9,28(11)\n\t" \
2421 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2422 "lwz 11,0(11)\n\t" /* target->r11 */ \
2423 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2424 VALGRIND_RESTORE_STACK \
2426 : /*out*/ "=r" (_res) \
2427 : /*in*/ "r" (&_argvec[0]) \
2428 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2430 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_9W (ppc32): args 1..8 -> r3..r10; the frame is grown by
   16 ("addi 1,1,-16") and arg9 is staged through r3 ("lwz 3,36(11)")
   for a store to the stack parameter area.  NOTE(review): the
   corresponding store instruction is not visible here -- gaps in
   the embedded upstream line numbers indicate dropped lines. */
2433 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2436 volatile OrigFn _orig = (orig); \
2437 volatile unsigned long _argvec[10]; \
2438 volatile unsigned long _res; \
2439 _argvec[0] = (unsigned long)_orig.nraddr; \
2440 _argvec[1] = (unsigned long)arg1; \
2441 _argvec[2] = (unsigned long)arg2; \
2442 _argvec[3] = (unsigned long)arg3; \
2443 _argvec[4] = (unsigned long)arg4; \
2444 _argvec[5] = (unsigned long)arg5; \
2445 _argvec[6] = (unsigned long)arg6; \
2446 _argvec[7] = (unsigned long)arg7; \
2447 _argvec[8] = (unsigned long)arg8; \
2448 _argvec[9] = (unsigned long)arg9; \
2450 VALGRIND_ALIGN_STACK \
2452 "addi 1,1,-16\n\t" \
2454 "lwz 3,36(11)\n\t" \
2457 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2459 "lwz 5,12(11)\n\t" \
2460 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2461 "lwz 7,20(11)\n\t" \
2462 "lwz 8,24(11)\n\t" \
2463 "lwz 9,28(11)\n\t" \
2464 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2465 "lwz 11,0(11)\n\t" /* target->r11 */ \
2466 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2467 VALGRIND_RESTORE_STACK \
2469 : /*out*/ "=r" (_res) \
2470 : /*in*/ "r" (&_argvec[0]) \
2471 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2473 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_10W (ppc32): args 1..8 -> r3..r10; args 9..10 staged
   through r3 (lwz 3,40(11) / lwz 3,36(11)) to the stack parameter
   area after growing the frame by 16.  NOTE(review): the matching
   store instructions are not visible here -- gaps in the embedded
   upstream line numbers indicate dropped lines. */
2476 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2477 arg7,arg8,arg9,arg10) \
2479 volatile OrigFn _orig = (orig); \
2480 volatile unsigned long _argvec[11]; \
2481 volatile unsigned long _res; \
2482 _argvec[0] = (unsigned long)_orig.nraddr; \
2483 _argvec[1] = (unsigned long)arg1; \
2484 _argvec[2] = (unsigned long)arg2; \
2485 _argvec[3] = (unsigned long)arg3; \
2486 _argvec[4] = (unsigned long)arg4; \
2487 _argvec[5] = (unsigned long)arg5; \
2488 _argvec[6] = (unsigned long)arg6; \
2489 _argvec[7] = (unsigned long)arg7; \
2490 _argvec[8] = (unsigned long)arg8; \
2491 _argvec[9] = (unsigned long)arg9; \
2492 _argvec[10] = (unsigned long)arg10; \
2494 VALGRIND_ALIGN_STACK \
2496 "addi 1,1,-16\n\t" \
2498 "lwz 3,40(11)\n\t" \
2501 "lwz 3,36(11)\n\t" \
2504 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2506 "lwz 5,12(11)\n\t" \
2507 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2508 "lwz 7,20(11)\n\t" \
2509 "lwz 8,24(11)\n\t" \
2510 "lwz 9,28(11)\n\t" \
2511 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2512 "lwz 11,0(11)\n\t" /* target->r11 */ \
2513 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2514 VALGRIND_RESTORE_STACK \
2516 : /*out*/ "=r" (_res) \
2517 : /*in*/ "r" (&_argvec[0]) \
2518 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2520 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_11W (ppc32): args 1..8 -> r3..r10; args 9..11 staged
   through r3 to the stack parameter area after growing the frame by
   32 ("addi 1,1,-32").  NOTE(review): the matching store
   instructions are not visible here -- gaps in the embedded
   upstream line numbers indicate dropped lines. */
2523 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2524 arg7,arg8,arg9,arg10,arg11) \
2526 volatile OrigFn _orig = (orig); \
2527 volatile unsigned long _argvec[12]; \
2528 volatile unsigned long _res; \
2529 _argvec[0] = (unsigned long)_orig.nraddr; \
2530 _argvec[1] = (unsigned long)arg1; \
2531 _argvec[2] = (unsigned long)arg2; \
2532 _argvec[3] = (unsigned long)arg3; \
2533 _argvec[4] = (unsigned long)arg4; \
2534 _argvec[5] = (unsigned long)arg5; \
2535 _argvec[6] = (unsigned long)arg6; \
2536 _argvec[7] = (unsigned long)arg7; \
2537 _argvec[8] = (unsigned long)arg8; \
2538 _argvec[9] = (unsigned long)arg9; \
2539 _argvec[10] = (unsigned long)arg10; \
2540 _argvec[11] = (unsigned long)arg11; \
2542 VALGRIND_ALIGN_STACK \
2544 "addi 1,1,-32\n\t" \
2546 "lwz 3,44(11)\n\t" \
2549 "lwz 3,40(11)\n\t" \
2552 "lwz 3,36(11)\n\t" \
2555 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2557 "lwz 5,12(11)\n\t" \
2558 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2559 "lwz 7,20(11)\n\t" \
2560 "lwz 8,24(11)\n\t" \
2561 "lwz 9,28(11)\n\t" \
2562 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2563 "lwz 11,0(11)\n\t" /* target->r11 */ \
2564 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2565 VALGRIND_RESTORE_STACK \
2567 : /*out*/ "=r" (_res) \
2568 : /*in*/ "r" (&_argvec[0]) \
2569 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2571 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_12W (ppc32): args 1..8 -> r3..r10; args 9..12 staged
   through r3 to the stack parameter area after growing the frame by
   32.  NOTE(review): the matching store instructions are not
   visible here -- gaps in the embedded upstream line numbers
   indicate dropped lines. */
2574 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2575 arg7,arg8,arg9,arg10,arg11,arg12) \
2577 volatile OrigFn _orig = (orig); \
2578 volatile unsigned long _argvec[13]; \
2579 volatile unsigned long _res; \
2580 _argvec[0] = (unsigned long)_orig.nraddr; \
2581 _argvec[1] = (unsigned long)arg1; \
2582 _argvec[2] = (unsigned long)arg2; \
2583 _argvec[3] = (unsigned long)arg3; \
2584 _argvec[4] = (unsigned long)arg4; \
2585 _argvec[5] = (unsigned long)arg5; \
2586 _argvec[6] = (unsigned long)arg6; \
2587 _argvec[7] = (unsigned long)arg7; \
2588 _argvec[8] = (unsigned long)arg8; \
2589 _argvec[9] = (unsigned long)arg9; \
2590 _argvec[10] = (unsigned long)arg10; \
2591 _argvec[11] = (unsigned long)arg11; \
2592 _argvec[12] = (unsigned long)arg12; \
2594 VALGRIND_ALIGN_STACK \
2596 "addi 1,1,-32\n\t" \
2598 "lwz 3,48(11)\n\t" \
2601 "lwz 3,44(11)\n\t" \
2604 "lwz 3,40(11)\n\t" \
2607 "lwz 3,36(11)\n\t" \
2610 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2612 "lwz 5,12(11)\n\t" \
2613 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2614 "lwz 7,20(11)\n\t" \
2615 "lwz 8,24(11)\n\t" \
2616 "lwz 9,28(11)\n\t" \
2617 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2618 "lwz 11,0(11)\n\t" /* target->r11 */ \
2619 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2620 VALGRIND_RESTORE_STACK \
2622 : /*out*/ "=r" (_res) \
2623 : /*in*/ "r" (&_argvec[0]) \
2624 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2626 lval = (__typeof__(lval)) _res; \
2629 #endif /* PLAT_ppc32_linux */
2631 /* ------------------------ ppc64-linux ------------------------ */
2633 #if defined(PLAT_ppc64be_linux)
2635 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2637 /* These regs are trashed by the hidden call. */
/* ppc64be: registers clobbered by the hidden call.  r2 (the TOC
   pointer) is intentionally absent -- each CALL_FN_ macro below
   saves and restores it explicitly around the call.  NOTE(review):
   the tail of this list (r11..r13 upstream) is not visible in this
   extract -- verify against upstream valgrind.h. */
2638 #define __CALLER_SAVED_REGS \
2639 "lr", "ctr", "xer", \
2640 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2641 "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2644 /* Macros to save and align the stack before making a function
2645 call and restore it afterwards as gcc may not keep the stack
2646 pointer aligned if it doesn't realise calls are being made
2647 to other functions. */
/* rldicr 1,1,0,59 clears the low 4 bits of r1, rounding the stack
   pointer down to a 16-byte boundary.  NOTE(review): the
   save-original-r1 step and the VALGRIND_RESTORE_STACK body are
   not visible in this extract. */
2649 #define VALGRIND_ALIGN_STACK \
2651 "rldicr 1,1,0,59\n\t"
2652 #define VALGRIND_RESTORE_STACK \
2655 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
/* CALL_FN_W_v (ppc64be): call the original, non-redirected function
   with no arguments.  r11 = &_argvec[2]; the caller's TOC (r2) is
   saved at -16(r11) (= _argvec[0]) and the callee's TOC loaded from
   -8(r11) (= _argvec[1], set to _orig.r2) before the NoRedir
   branch, then restored afterwards.  NOTE(review): gaps in the
   embedded upstream line numbers indicate dropped lines (asm
   scaffolding) in this extract. */
2658 #define CALL_FN_W_v(lval, orig) \
2660 volatile OrigFn _orig = (orig); \
2661 volatile unsigned long _argvec[3+0]; \
2662 volatile unsigned long _res; \
2663 /* _argvec[0] holds current r2 across the call */ \
2664 _argvec[1] = (unsigned long)_orig.r2; \
2665 _argvec[2] = (unsigned long)_orig.nraddr; \
2667 VALGRIND_ALIGN_STACK \
2669 "std 2,-16(11)\n\t" /* save tocptr */ \
2670 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2671 "ld 11, 0(11)\n\t" /* target->r11 */ \
2672 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2675 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2676 VALGRIND_RESTORE_STACK \
2677 : /*out*/ "=r" (_res) \
2678 : /*in*/ "r" (&_argvec[2]) \
2679 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2681 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_W (ppc64be): one argument, loaded from 8(r11)
   (= _argvec[3]) into r3; TOC saved/swapped/restored as in
   CALL_FN_W_v above.  NOTE(review): gaps in the embedded upstream
   line numbers indicate dropped lines in this extract. */
2684 #define CALL_FN_W_W(lval, orig, arg1) \
2686 volatile OrigFn _orig = (orig); \
2687 volatile unsigned long _argvec[3+1]; \
2688 volatile unsigned long _res; \
2689 /* _argvec[0] holds current r2 across the call */ \
2690 _argvec[1] = (unsigned long)_orig.r2; \
2691 _argvec[2] = (unsigned long)_orig.nraddr; \
2692 _argvec[2+1] = (unsigned long)arg1; \
2694 VALGRIND_ALIGN_STACK \
2696 "std 2,-16(11)\n\t" /* save tocptr */ \
2697 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2698 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2699 "ld 11, 0(11)\n\t" /* target->r11 */ \
2700 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2703 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2704 VALGRIND_RESTORE_STACK \
2705 : /*out*/ "=r" (_res) \
2706 : /*in*/ "r" (&_argvec[2]) \
2707 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2709 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_WW (ppc64be): two arguments -> r3/r4 from doubleword
   offsets 8/16 off r11; TOC handled as in CALL_FN_W_v.
   NOTE(review): gaps in the embedded upstream line numbers indicate
   dropped lines in this extract. */
2712 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2714 volatile OrigFn _orig = (orig); \
2715 volatile unsigned long _argvec[3+2]; \
2716 volatile unsigned long _res; \
2717 /* _argvec[0] holds current r2 across the call */ \
2718 _argvec[1] = (unsigned long)_orig.r2; \
2719 _argvec[2] = (unsigned long)_orig.nraddr; \
2720 _argvec[2+1] = (unsigned long)arg1; \
2721 _argvec[2+2] = (unsigned long)arg2; \
2723 VALGRIND_ALIGN_STACK \
2725 "std 2,-16(11)\n\t" /* save tocptr */ \
2726 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2727 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2728 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2729 "ld 11, 0(11)\n\t" /* target->r11 */ \
2730 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2733 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2734 VALGRIND_RESTORE_STACK \
2735 : /*out*/ "=r" (_res) \
2736 : /*in*/ "r" (&_argvec[2]) \
2737 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2739 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_WWW (ppc64be): three arguments -> r3..r5; TOC handled
   as in CALL_FN_W_v.  NOTE(review): gaps in the embedded upstream
   line numbers indicate dropped lines in this extract. */
2742 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2744 volatile OrigFn _orig = (orig); \
2745 volatile unsigned long _argvec[3+3]; \
2746 volatile unsigned long _res; \
2747 /* _argvec[0] holds current r2 across the call */ \
2748 _argvec[1] = (unsigned long)_orig.r2; \
2749 _argvec[2] = (unsigned long)_orig.nraddr; \
2750 _argvec[2+1] = (unsigned long)arg1; \
2751 _argvec[2+2] = (unsigned long)arg2; \
2752 _argvec[2+3] = (unsigned long)arg3; \
2754 VALGRIND_ALIGN_STACK \
2756 "std 2,-16(11)\n\t" /* save tocptr */ \
2757 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2758 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2759 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2760 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2761 "ld 11, 0(11)\n\t" /* target->r11 */ \
2762 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2765 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2766 VALGRIND_RESTORE_STACK \
2767 : /*out*/ "=r" (_res) \
2768 : /*in*/ "r" (&_argvec[2]) \
2769 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2771 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_WWWW (ppc64be): four arguments -> r3..r6; TOC handled
   as in CALL_FN_W_v.  NOTE(review): gaps in the embedded upstream
   line numbers indicate dropped lines in this extract. */
2774 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2776 volatile OrigFn _orig = (orig); \
2777 volatile unsigned long _argvec[3+4]; \
2778 volatile unsigned long _res; \
2779 /* _argvec[0] holds current r2 across the call */ \
2780 _argvec[1] = (unsigned long)_orig.r2; \
2781 _argvec[2] = (unsigned long)_orig.nraddr; \
2782 _argvec[2+1] = (unsigned long)arg1; \
2783 _argvec[2+2] = (unsigned long)arg2; \
2784 _argvec[2+3] = (unsigned long)arg3; \
2785 _argvec[2+4] = (unsigned long)arg4; \
2787 VALGRIND_ALIGN_STACK \
2789 "std 2,-16(11)\n\t" /* save tocptr */ \
2790 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2791 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2792 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2793 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2794 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2795 "ld 11, 0(11)\n\t" /* target->r11 */ \
2796 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2799 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2800 VALGRIND_RESTORE_STACK \
2801 : /*out*/ "=r" (_res) \
2802 : /*in*/ "r" (&_argvec[2]) \
2803 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2805 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_5W (ppc64be): five arguments -> r3..r7; TOC handled as
   in CALL_FN_W_v.  NOTE(review): gaps in the embedded upstream line
   numbers indicate dropped lines in this extract. */
2808 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2810 volatile OrigFn _orig = (orig); \
2811 volatile unsigned long _argvec[3+5]; \
2812 volatile unsigned long _res; \
2813 /* _argvec[0] holds current r2 across the call */ \
2814 _argvec[1] = (unsigned long)_orig.r2; \
2815 _argvec[2] = (unsigned long)_orig.nraddr; \
2816 _argvec[2+1] = (unsigned long)arg1; \
2817 _argvec[2+2] = (unsigned long)arg2; \
2818 _argvec[2+3] = (unsigned long)arg3; \
2819 _argvec[2+4] = (unsigned long)arg4; \
2820 _argvec[2+5] = (unsigned long)arg5; \
2822 VALGRIND_ALIGN_STACK \
2824 "std 2,-16(11)\n\t" /* save tocptr */ \
2825 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2826 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2827 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2828 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2829 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2830 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2831 "ld 11, 0(11)\n\t" /* target->r11 */ \
2832 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2835 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2836 VALGRIND_RESTORE_STACK \
2837 : /*out*/ "=r" (_res) \
2838 : /*in*/ "r" (&_argvec[2]) \
2839 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2841 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_6W (ppc64be): six arguments -> r3..r8; TOC handled as
   in CALL_FN_W_v.  NOTE(review): gaps in the embedded upstream line
   numbers indicate dropped lines in this extract. */
2844 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2846 volatile OrigFn _orig = (orig); \
2847 volatile unsigned long _argvec[3+6]; \
2848 volatile unsigned long _res; \
2849 /* _argvec[0] holds current r2 across the call */ \
2850 _argvec[1] = (unsigned long)_orig.r2; \
2851 _argvec[2] = (unsigned long)_orig.nraddr; \
2852 _argvec[2+1] = (unsigned long)arg1; \
2853 _argvec[2+2] = (unsigned long)arg2; \
2854 _argvec[2+3] = (unsigned long)arg3; \
2855 _argvec[2+4] = (unsigned long)arg4; \
2856 _argvec[2+5] = (unsigned long)arg5; \
2857 _argvec[2+6] = (unsigned long)arg6; \
2859 VALGRIND_ALIGN_STACK \
2861 "std 2,-16(11)\n\t" /* save tocptr */ \
2862 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2863 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2864 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2865 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2866 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2867 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2868 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2869 "ld 11, 0(11)\n\t" /* target->r11 */ \
2870 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2873 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2874 VALGRIND_RESTORE_STACK \
2875 : /*out*/ "=r" (_res) \
2876 : /*in*/ "r" (&_argvec[2]) \
2877 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2879 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_7W (ppc64be): seven arguments -> r3..r9; TOC handled
   as in CALL_FN_W_v.  NOTE(review): gaps in the embedded upstream
   line numbers indicate dropped lines in this extract. */
2882 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2885 volatile OrigFn _orig = (orig); \
2886 volatile unsigned long _argvec[3+7]; \
2887 volatile unsigned long _res; \
2888 /* _argvec[0] holds current r2 across the call */ \
2889 _argvec[1] = (unsigned long)_orig.r2; \
2890 _argvec[2] = (unsigned long)_orig.nraddr; \
2891 _argvec[2+1] = (unsigned long)arg1; \
2892 _argvec[2+2] = (unsigned long)arg2; \
2893 _argvec[2+3] = (unsigned long)arg3; \
2894 _argvec[2+4] = (unsigned long)arg4; \
2895 _argvec[2+5] = (unsigned long)arg5; \
2896 _argvec[2+6] = (unsigned long)arg6; \
2897 _argvec[2+7] = (unsigned long)arg7; \
2899 VALGRIND_ALIGN_STACK \
2901 "std 2,-16(11)\n\t" /* save tocptr */ \
2902 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2903 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2904 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2905 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2906 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2907 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2908 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2909 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2910 "ld 11, 0(11)\n\t" /* target->r11 */ \
2911 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2914 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2915 VALGRIND_RESTORE_STACK \
2916 : /*out*/ "=r" (_res) \
2917 : /*in*/ "r" (&_argvec[2]) \
2918 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2920 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_8W (ppc64be): eight arguments -> r3..r10, the full GPR
   argument set; TOC handled as in CALL_FN_W_v.  NOTE(review): gaps
   in the embedded upstream line numbers indicate dropped lines in
   this extract. */
2923 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2926 volatile OrigFn _orig = (orig); \
2927 volatile unsigned long _argvec[3+8]; \
2928 volatile unsigned long _res; \
2929 /* _argvec[0] holds current r2 across the call */ \
2930 _argvec[1] = (unsigned long)_orig.r2; \
2931 _argvec[2] = (unsigned long)_orig.nraddr; \
2932 _argvec[2+1] = (unsigned long)arg1; \
2933 _argvec[2+2] = (unsigned long)arg2; \
2934 _argvec[2+3] = (unsigned long)arg3; \
2935 _argvec[2+4] = (unsigned long)arg4; \
2936 _argvec[2+5] = (unsigned long)arg5; \
2937 _argvec[2+6] = (unsigned long)arg6; \
2938 _argvec[2+7] = (unsigned long)arg7; \
2939 _argvec[2+8] = (unsigned long)arg8; \
2941 VALGRIND_ALIGN_STACK \
2943 "std 2,-16(11)\n\t" /* save tocptr */ \
2944 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2945 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2946 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2947 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2948 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2949 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2950 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2951 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2952 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2953 "ld 11, 0(11)\n\t" /* target->r11 */ \
2954 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2957 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2958 VALGRIND_RESTORE_STACK \
2959 : /*out*/ "=r" (_res) \
2960 : /*in*/ "r" (&_argvec[2]) \
2961 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2963 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_9W (ppc64be): args 1..8 -> r3..r10; the frame is grown
   by 128 and arg9 is staged through r3 into the stack parameter
   area at 112(r1) ("std 3,112(1)").  NOTE(review): the load that
   should precede that store (upstream "ld 3,72(11)") is not visible
   here -- gaps in the embedded upstream line numbers indicate
   dropped lines. */
2966 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2969 volatile OrigFn _orig = (orig); \
2970 volatile unsigned long _argvec[3+9]; \
2971 volatile unsigned long _res; \
2972 /* _argvec[0] holds current r2 across the call */ \
2973 _argvec[1] = (unsigned long)_orig.r2; \
2974 _argvec[2] = (unsigned long)_orig.nraddr; \
2975 _argvec[2+1] = (unsigned long)arg1; \
2976 _argvec[2+2] = (unsigned long)arg2; \
2977 _argvec[2+3] = (unsigned long)arg3; \
2978 _argvec[2+4] = (unsigned long)arg4; \
2979 _argvec[2+5] = (unsigned long)arg5; \
2980 _argvec[2+6] = (unsigned long)arg6; \
2981 _argvec[2+7] = (unsigned long)arg7; \
2982 _argvec[2+8] = (unsigned long)arg8; \
2983 _argvec[2+9] = (unsigned long)arg9; \
2985 VALGRIND_ALIGN_STACK \
2987 "std 2,-16(11)\n\t" /* save tocptr */ \
2988 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2989 "addi 1,1,-128\n\t" /* expand stack frame */ \
2992 "std 3,112(1)\n\t" \
2994 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2995 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2996 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2997 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2998 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2999 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3000 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3001 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3002 "ld 11, 0(11)\n\t" /* target->r11 */ \
3003 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3006 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3007 VALGRIND_RESTORE_STACK \
3008 : /*out*/ "=r" (_res) \
3009 : /*in*/ "r" (&_argvec[2]) \
3010 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3012 lval = (__typeof__(lval)) _res; \
/* CALL_FN_W_10W (ppc64be): args 1..8 -> r3..r10; args 9..10 staged
   through r3 into the stack parameter area at 112(1)/120(1) after
   growing the frame by 128.  NOTE(review): the loads that should
   precede those stores are not visible here -- gaps in the embedded
   upstream line numbers indicate dropped lines. */
3015 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3016 arg7,arg8,arg9,arg10) \
3018 volatile OrigFn _orig = (orig); \
3019 volatile unsigned long _argvec[3+10]; \
3020 volatile unsigned long _res; \
3021 /* _argvec[0] holds current r2 across the call */ \
3022 _argvec[1] = (unsigned long)_orig.r2; \
3023 _argvec[2] = (unsigned long)_orig.nraddr; \
3024 _argvec[2+1] = (unsigned long)arg1; \
3025 _argvec[2+2] = (unsigned long)arg2; \
3026 _argvec[2+3] = (unsigned long)arg3; \
3027 _argvec[2+4] = (unsigned long)arg4; \
3028 _argvec[2+5] = (unsigned long)arg5; \
3029 _argvec[2+6] = (unsigned long)arg6; \
3030 _argvec[2+7] = (unsigned long)arg7; \
3031 _argvec[2+8] = (unsigned long)arg8; \
3032 _argvec[2+9] = (unsigned long)arg9; \
3033 _argvec[2+10] = (unsigned long)arg10; \
3035 VALGRIND_ALIGN_STACK \
3037 "std 2,-16(11)\n\t" /* save tocptr */ \
3038 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3039 "addi 1,1,-128\n\t" /* expand stack frame */ \
3042 "std 3,120(1)\n\t" \
3045 "std 3,112(1)\n\t" \
3047 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3048 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3049 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3050 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3051 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3052 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3053 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3054 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3055 "ld 11, 0(11)\n\t" /* target->r11 */ \
3056 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3059 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3060 VALGRIND_RESTORE_STACK \
3061 : /*out*/ "=r" (_res) \
3062 : /*in*/ "r" (&_argvec[2]) \
3063 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3065 lval = (__typeof__(lval)) _res; \
3068 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3069 arg7,arg8,arg9,arg10,arg11) \
3071 volatile OrigFn _orig = (orig); \
3072 volatile unsigned long _argvec[3+11]; \
3073 volatile unsigned long _res; \
3074 /* _argvec[0] holds current r2 across the call */ \
3075 _argvec[1] = (unsigned long)_orig.r2; \
3076 _argvec[2] = (unsigned long)_orig.nraddr; \
3077 _argvec[2+1] = (unsigned long)arg1; \
3078 _argvec[2+2] = (unsigned long)arg2; \
3079 _argvec[2+3] = (unsigned long)arg3; \
3080 _argvec[2+4] = (unsigned long)arg4; \
3081 _argvec[2+5] = (unsigned long)arg5; \
3082 _argvec[2+6] = (unsigned long)arg6; \
3083 _argvec[2+7] = (unsigned long)arg7; \
3084 _argvec[2+8] = (unsigned long)arg8; \
3085 _argvec[2+9] = (unsigned long)arg9; \
3086 _argvec[2+10] = (unsigned long)arg10; \
3087 _argvec[2+11] = (unsigned long)arg11; \
3089 VALGRIND_ALIGN_STACK \
3091 "std 2,-16(11)\n\t" /* save tocptr */ \
3092 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3093 "addi 1,1,-144\n\t" /* expand stack frame */ \
3096 "std 3,128(1)\n\t" \
3099 "std 3,120(1)\n\t" \
3102 "std 3,112(1)\n\t" \
3104 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3105 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3106 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3107 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3108 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3109 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3110 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3111 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3112 "ld 11, 0(11)\n\t" /* target->r11 */ \
3113 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3116 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3117 VALGRIND_RESTORE_STACK \
3118 : /*out*/ "=r" (_res) \
3119 : /*in*/ "r" (&_argvec[2]) \
3120 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3122 lval = (__typeof__(lval)) _res; \
3125 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3126 arg7,arg8,arg9,arg10,arg11,arg12) \
3128 volatile OrigFn _orig = (orig); \
3129 volatile unsigned long _argvec[3+12]; \
3130 volatile unsigned long _res; \
3131 /* _argvec[0] holds current r2 across the call */ \
3132 _argvec[1] = (unsigned long)_orig.r2; \
3133 _argvec[2] = (unsigned long)_orig.nraddr; \
3134 _argvec[2+1] = (unsigned long)arg1; \
3135 _argvec[2+2] = (unsigned long)arg2; \
3136 _argvec[2+3] = (unsigned long)arg3; \
3137 _argvec[2+4] = (unsigned long)arg4; \
3138 _argvec[2+5] = (unsigned long)arg5; \
3139 _argvec[2+6] = (unsigned long)arg6; \
3140 _argvec[2+7] = (unsigned long)arg7; \
3141 _argvec[2+8] = (unsigned long)arg8; \
3142 _argvec[2+9] = (unsigned long)arg9; \
3143 _argvec[2+10] = (unsigned long)arg10; \
3144 _argvec[2+11] = (unsigned long)arg11; \
3145 _argvec[2+12] = (unsigned long)arg12; \
3147 VALGRIND_ALIGN_STACK \
3149 "std 2,-16(11)\n\t" /* save tocptr */ \
3150 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3151 "addi 1,1,-144\n\t" /* expand stack frame */ \
3154 "std 3,136(1)\n\t" \
3157 "std 3,128(1)\n\t" \
3160 "std 3,120(1)\n\t" \
3163 "std 3,112(1)\n\t" \
3165 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3166 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3167 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3168 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3169 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3170 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3171 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3172 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3173 "ld 11, 0(11)\n\t" /* target->r11 */ \
3174 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3177 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3178 VALGRIND_RESTORE_STACK \
3179 : /*out*/ "=r" (_res) \
3180 : /*in*/ "r" (&_argvec[2]) \
3181 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3183 lval = (__typeof__(lval)) _res; \
3186 #endif /* PLAT_ppc64be_linux */
3188 /* ------------------------- ppc64le-linux ----------------------- */
3189 #if defined(PLAT_ppc64le_linux)
3191 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3193 /* These regs are trashed by the hidden call. */
3194 #define __CALLER_SAVED_REGS \
3195 "lr", "ctr", "xer", \
3196 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
3197 "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
3200 /* Macros to save and align the stack before making a function
3201 call and restore it afterwards as gcc may not keep the stack
3202 pointer aligned if it doesn't realise calls are being made
3203 to other functions. */
3205 #define VALGRIND_ALIGN_STACK \
3207 "rldicr 1,1,0,59\n\t"
3208 #define VALGRIND_RESTORE_STACK \
3211 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned long) == 8. */
3214 #define CALL_FN_W_v(lval, orig) \
3216 volatile OrigFn _orig = (orig); \
3217 volatile unsigned long _argvec[3+0]; \
3218 volatile unsigned long _res; \
3219 /* _argvec[0] holds current r2 across the call */ \
3220 _argvec[1] = (unsigned long)_orig.r2; \
3221 _argvec[2] = (unsigned long)_orig.nraddr; \
3223 VALGRIND_ALIGN_STACK \
3225 "std 2,-16(12)\n\t" /* save tocptr */ \
3226 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3227 "ld 12, 0(12)\n\t" /* target->r12 */ \
3228 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3231 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3232 VALGRIND_RESTORE_STACK \
3233 : /*out*/ "=r" (_res) \
3234 : /*in*/ "r" (&_argvec[2]) \
3235 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3237 lval = (__typeof__(lval)) _res; \
3240 #define CALL_FN_W_W(lval, orig, arg1) \
3242 volatile OrigFn _orig = (orig); \
3243 volatile unsigned long _argvec[3+1]; \
3244 volatile unsigned long _res; \
3245 /* _argvec[0] holds current r2 across the call */ \
3246 _argvec[1] = (unsigned long)_orig.r2; \
3247 _argvec[2] = (unsigned long)_orig.nraddr; \
3248 _argvec[2+1] = (unsigned long)arg1; \
3250 VALGRIND_ALIGN_STACK \
3252 "std 2,-16(12)\n\t" /* save tocptr */ \
3253 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3254 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3255 "ld 12, 0(12)\n\t" /* target->r12 */ \
3256 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3259 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3260 VALGRIND_RESTORE_STACK \
3261 : /*out*/ "=r" (_res) \
3262 : /*in*/ "r" (&_argvec[2]) \
3263 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3265 lval = (__typeof__(lval)) _res; \
3268 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3270 volatile OrigFn _orig = (orig); \
3271 volatile unsigned long _argvec[3+2]; \
3272 volatile unsigned long _res; \
3273 /* _argvec[0] holds current r2 across the call */ \
3274 _argvec[1] = (unsigned long)_orig.r2; \
3275 _argvec[2] = (unsigned long)_orig.nraddr; \
3276 _argvec[2+1] = (unsigned long)arg1; \
3277 _argvec[2+2] = (unsigned long)arg2; \
3279 VALGRIND_ALIGN_STACK \
3281 "std 2,-16(12)\n\t" /* save tocptr */ \
3282 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3283 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3284 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3285 "ld 12, 0(12)\n\t" /* target->r12 */ \
3286 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3289 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3290 VALGRIND_RESTORE_STACK \
3291 : /*out*/ "=r" (_res) \
3292 : /*in*/ "r" (&_argvec[2]) \
3293 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3295 lval = (__typeof__(lval)) _res; \
3298 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3300 volatile OrigFn _orig = (orig); \
3301 volatile unsigned long _argvec[3+3]; \
3302 volatile unsigned long _res; \
3303 /* _argvec[0] holds current r2 across the call */ \
3304 _argvec[1] = (unsigned long)_orig.r2; \
3305 _argvec[2] = (unsigned long)_orig.nraddr; \
3306 _argvec[2+1] = (unsigned long)arg1; \
3307 _argvec[2+2] = (unsigned long)arg2; \
3308 _argvec[2+3] = (unsigned long)arg3; \
3310 VALGRIND_ALIGN_STACK \
3312 "std 2,-16(12)\n\t" /* save tocptr */ \
3313 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3314 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3315 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3316 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3317 "ld 12, 0(12)\n\t" /* target->r12 */ \
3318 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3321 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3322 VALGRIND_RESTORE_STACK \
3323 : /*out*/ "=r" (_res) \
3324 : /*in*/ "r" (&_argvec[2]) \
3325 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3327 lval = (__typeof__(lval)) _res; \
3330 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3332 volatile OrigFn _orig = (orig); \
3333 volatile unsigned long _argvec[3+4]; \
3334 volatile unsigned long _res; \
3335 /* _argvec[0] holds current r2 across the call */ \
3336 _argvec[1] = (unsigned long)_orig.r2; \
3337 _argvec[2] = (unsigned long)_orig.nraddr; \
3338 _argvec[2+1] = (unsigned long)arg1; \
3339 _argvec[2+2] = (unsigned long)arg2; \
3340 _argvec[2+3] = (unsigned long)arg3; \
3341 _argvec[2+4] = (unsigned long)arg4; \
3343 VALGRIND_ALIGN_STACK \
3345 "std 2,-16(12)\n\t" /* save tocptr */ \
3346 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3347 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3348 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3349 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3350 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3351 "ld 12, 0(12)\n\t" /* target->r12 */ \
3352 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3355 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3356 VALGRIND_RESTORE_STACK \
3357 : /*out*/ "=r" (_res) \
3358 : /*in*/ "r" (&_argvec[2]) \
3359 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3361 lval = (__typeof__(lval)) _res; \
3364 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3366 volatile OrigFn _orig = (orig); \
3367 volatile unsigned long _argvec[3+5]; \
3368 volatile unsigned long _res; \
3369 /* _argvec[0] holds current r2 across the call */ \
3370 _argvec[1] = (unsigned long)_orig.r2; \
3371 _argvec[2] = (unsigned long)_orig.nraddr; \
3372 _argvec[2+1] = (unsigned long)arg1; \
3373 _argvec[2+2] = (unsigned long)arg2; \
3374 _argvec[2+3] = (unsigned long)arg3; \
3375 _argvec[2+4] = (unsigned long)arg4; \
3376 _argvec[2+5] = (unsigned long)arg5; \
3378 VALGRIND_ALIGN_STACK \
3380 "std 2,-16(12)\n\t" /* save tocptr */ \
3381 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3382 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3383 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3384 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3385 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3386 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3387 "ld 12, 0(12)\n\t" /* target->r12 */ \
3388 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3391 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3392 VALGRIND_RESTORE_STACK \
3393 : /*out*/ "=r" (_res) \
3394 : /*in*/ "r" (&_argvec[2]) \
3395 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3397 lval = (__typeof__(lval)) _res; \
3400 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3402 volatile OrigFn _orig = (orig); \
3403 volatile unsigned long _argvec[3+6]; \
3404 volatile unsigned long _res; \
3405 /* _argvec[0] holds current r2 across the call */ \
3406 _argvec[1] = (unsigned long)_orig.r2; \
3407 _argvec[2] = (unsigned long)_orig.nraddr; \
3408 _argvec[2+1] = (unsigned long)arg1; \
3409 _argvec[2+2] = (unsigned long)arg2; \
3410 _argvec[2+3] = (unsigned long)arg3; \
3411 _argvec[2+4] = (unsigned long)arg4; \
3412 _argvec[2+5] = (unsigned long)arg5; \
3413 _argvec[2+6] = (unsigned long)arg6; \
3415 VALGRIND_ALIGN_STACK \
3417 "std 2,-16(12)\n\t" /* save tocptr */ \
3418 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3419 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3420 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3421 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3422 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3423 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3424 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3425 "ld 12, 0(12)\n\t" /* target->r12 */ \
3426 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3429 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3430 VALGRIND_RESTORE_STACK \
3431 : /*out*/ "=r" (_res) \
3432 : /*in*/ "r" (&_argvec[2]) \
3433 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3435 lval = (__typeof__(lval)) _res; \
3438 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3441 volatile OrigFn _orig = (orig); \
3442 volatile unsigned long _argvec[3+7]; \
3443 volatile unsigned long _res; \
3444 /* _argvec[0] holds current r2 across the call */ \
3445 _argvec[1] = (unsigned long)_orig.r2; \
3446 _argvec[2] = (unsigned long)_orig.nraddr; \
3447 _argvec[2+1] = (unsigned long)arg1; \
3448 _argvec[2+2] = (unsigned long)arg2; \
3449 _argvec[2+3] = (unsigned long)arg3; \
3450 _argvec[2+4] = (unsigned long)arg4; \
3451 _argvec[2+5] = (unsigned long)arg5; \
3452 _argvec[2+6] = (unsigned long)arg6; \
3453 _argvec[2+7] = (unsigned long)arg7; \
3455 VALGRIND_ALIGN_STACK \
3457 "std 2,-16(12)\n\t" /* save tocptr */ \
3458 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3459 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3460 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3461 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3462 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3463 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3464 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3465 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3466 "ld 12, 0(12)\n\t" /* target->r12 */ \
3467 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3470 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3471 VALGRIND_RESTORE_STACK \
3472 : /*out*/ "=r" (_res) \
3473 : /*in*/ "r" (&_argvec[2]) \
3474 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3476 lval = (__typeof__(lval)) _res; \
3479 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3482 volatile OrigFn _orig = (orig); \
3483 volatile unsigned long _argvec[3+8]; \
3484 volatile unsigned long _res; \
3485 /* _argvec[0] holds current r2 across the call */ \
3486 _argvec[1] = (unsigned long)_orig.r2; \
3487 _argvec[2] = (unsigned long)_orig.nraddr; \
3488 _argvec[2+1] = (unsigned long)arg1; \
3489 _argvec[2+2] = (unsigned long)arg2; \
3490 _argvec[2+3] = (unsigned long)arg3; \
3491 _argvec[2+4] = (unsigned long)arg4; \
3492 _argvec[2+5] = (unsigned long)arg5; \
3493 _argvec[2+6] = (unsigned long)arg6; \
3494 _argvec[2+7] = (unsigned long)arg7; \
3495 _argvec[2+8] = (unsigned long)arg8; \
3497 VALGRIND_ALIGN_STACK \
3499 "std 2,-16(12)\n\t" /* save tocptr */ \
3500 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3501 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3502 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3503 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3504 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3505 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3506 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3507 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3508 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3509 "ld 12, 0(12)\n\t" /* target->r12 */ \
3510 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3513 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3514 VALGRIND_RESTORE_STACK \
3515 : /*out*/ "=r" (_res) \
3516 : /*in*/ "r" (&_argvec[2]) \
3517 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3519 lval = (__typeof__(lval)) _res; \
3522 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3525 volatile OrigFn _orig = (orig); \
3526 volatile unsigned long _argvec[3+9]; \
3527 volatile unsigned long _res; \
3528 /* _argvec[0] holds current r2 across the call */ \
3529 _argvec[1] = (unsigned long)_orig.r2; \
3530 _argvec[2] = (unsigned long)_orig.nraddr; \
3531 _argvec[2+1] = (unsigned long)arg1; \
3532 _argvec[2+2] = (unsigned long)arg2; \
3533 _argvec[2+3] = (unsigned long)arg3; \
3534 _argvec[2+4] = (unsigned long)arg4; \
3535 _argvec[2+5] = (unsigned long)arg5; \
3536 _argvec[2+6] = (unsigned long)arg6; \
3537 _argvec[2+7] = (unsigned long)arg7; \
3538 _argvec[2+8] = (unsigned long)arg8; \
3539 _argvec[2+9] = (unsigned long)arg9; \
3541 VALGRIND_ALIGN_STACK \
3543 "std 2,-16(12)\n\t" /* save tocptr */ \
3544 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3545 "addi 1,1,-128\n\t" /* expand stack frame */ \
3550 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3551 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3552 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3553 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3554 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3555 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3556 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3557 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3558 "ld 12, 0(12)\n\t" /* target->r12 */ \
3559 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3562 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3563 VALGRIND_RESTORE_STACK \
3564 : /*out*/ "=r" (_res) \
3565 : /*in*/ "r" (&_argvec[2]) \
3566 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3568 lval = (__typeof__(lval)) _res; \
3571 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3572 arg7,arg8,arg9,arg10) \
3574 volatile OrigFn _orig = (orig); \
3575 volatile unsigned long _argvec[3+10]; \
3576 volatile unsigned long _res; \
3577 /* _argvec[0] holds current r2 across the call */ \
3578 _argvec[1] = (unsigned long)_orig.r2; \
3579 _argvec[2] = (unsigned long)_orig.nraddr; \
3580 _argvec[2+1] = (unsigned long)arg1; \
3581 _argvec[2+2] = (unsigned long)arg2; \
3582 _argvec[2+3] = (unsigned long)arg3; \
3583 _argvec[2+4] = (unsigned long)arg4; \
3584 _argvec[2+5] = (unsigned long)arg5; \
3585 _argvec[2+6] = (unsigned long)arg6; \
3586 _argvec[2+7] = (unsigned long)arg7; \
3587 _argvec[2+8] = (unsigned long)arg8; \
3588 _argvec[2+9] = (unsigned long)arg9; \
3589 _argvec[2+10] = (unsigned long)arg10; \
3591 VALGRIND_ALIGN_STACK \
3593 "std 2,-16(12)\n\t" /* save tocptr */ \
3594 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3595 "addi 1,1,-128\n\t" /* expand stack frame */ \
3598 "std 3,104(1)\n\t" \
3603 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3604 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3605 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3606 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3607 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3608 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3609 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3610 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3611 "ld 12, 0(12)\n\t" /* target->r12 */ \
3612 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3615 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3616 VALGRIND_RESTORE_STACK \
3617 : /*out*/ "=r" (_res) \
3618 : /*in*/ "r" (&_argvec[2]) \
3619 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3621 lval = (__typeof__(lval)) _res; \
3624 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3625 arg7,arg8,arg9,arg10,arg11) \
3627 volatile OrigFn _orig = (orig); \
3628 volatile unsigned long _argvec[3+11]; \
3629 volatile unsigned long _res; \
3630 /* _argvec[0] holds current r2 across the call */ \
3631 _argvec[1] = (unsigned long)_orig.r2; \
3632 _argvec[2] = (unsigned long)_orig.nraddr; \
3633 _argvec[2+1] = (unsigned long)arg1; \
3634 _argvec[2+2] = (unsigned long)arg2; \
3635 _argvec[2+3] = (unsigned long)arg3; \
3636 _argvec[2+4] = (unsigned long)arg4; \
3637 _argvec[2+5] = (unsigned long)arg5; \
3638 _argvec[2+6] = (unsigned long)arg6; \
3639 _argvec[2+7] = (unsigned long)arg7; \
3640 _argvec[2+8] = (unsigned long)arg8; \
3641 _argvec[2+9] = (unsigned long)arg9; \
3642 _argvec[2+10] = (unsigned long)arg10; \
3643 _argvec[2+11] = (unsigned long)arg11; \
3645 VALGRIND_ALIGN_STACK \
3647 "std 2,-16(12)\n\t" /* save tocptr */ \
3648 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3649 "addi 1,1,-144\n\t" /* expand stack frame */ \
3652 "std 3,112(1)\n\t" \
3655 "std 3,104(1)\n\t" \
3660 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3661 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3662 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3663 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3664 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3665 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3666 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3667 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3668 "ld 12, 0(12)\n\t" /* target->r12 */ \
3669 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3672 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3673 VALGRIND_RESTORE_STACK \
3674 : /*out*/ "=r" (_res) \
3675 : /*in*/ "r" (&_argvec[2]) \
3676 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3678 lval = (__typeof__(lval)) _res; \
3681 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3682 arg7,arg8,arg9,arg10,arg11,arg12) \
3684 volatile OrigFn _orig = (orig); \
3685 volatile unsigned long _argvec[3+12]; \
3686 volatile unsigned long _res; \
3687 /* _argvec[0] holds current r2 across the call */ \
3688 _argvec[1] = (unsigned long)_orig.r2; \
3689 _argvec[2] = (unsigned long)_orig.nraddr; \
3690 _argvec[2+1] = (unsigned long)arg1; \
3691 _argvec[2+2] = (unsigned long)arg2; \
3692 _argvec[2+3] = (unsigned long)arg3; \
3693 _argvec[2+4] = (unsigned long)arg4; \
3694 _argvec[2+5] = (unsigned long)arg5; \
3695 _argvec[2+6] = (unsigned long)arg6; \
3696 _argvec[2+7] = (unsigned long)arg7; \
3697 _argvec[2+8] = (unsigned long)arg8; \
3698 _argvec[2+9] = (unsigned long)arg9; \
3699 _argvec[2+10] = (unsigned long)arg10; \
3700 _argvec[2+11] = (unsigned long)arg11; \
3701 _argvec[2+12] = (unsigned long)arg12; \
3703 VALGRIND_ALIGN_STACK \
3705 "std 2,-16(12)\n\t" /* save tocptr */ \
3706 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3707 "addi 1,1,-144\n\t" /* expand stack frame */ \
3710 "std 3,120(1)\n\t" \
3713 "std 3,112(1)\n\t" \
3716 "std 3,104(1)\n\t" \
3721 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3722 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3723 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3724 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3725 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3726 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3727 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3728 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3729 "ld 12, 0(12)\n\t" /* target->r12 */ \
3730 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3733 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3734 VALGRIND_RESTORE_STACK \
3735 : /*out*/ "=r" (_res) \
3736 : /*in*/ "r" (&_argvec[2]) \
3737 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3739 lval = (__typeof__(lval)) _res; \
3742 #endif /* PLAT_ppc64le_linux */
3744 /* ------------------------- arm-linux ------------------------- */
3746 #if defined(PLAT_arm_linux)
3748 /* These regs are trashed by the hidden call. */
/* NOTE(review): r0-r3 are the AAPCS argument/scratch registers and r12 (ip) /
   r14 (lr) are call-clobbered; r4 is additionally listed because the
   hidden-call sequences below load the target address into r4
   ("ldr r4, [%1]") and branch through it, trashing it. */
3749 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3751 /* Macros to save and align the stack before making a function
3752 call and restore it afterwards as gcc may not keep the stack
3753 pointer aligned if it doesn't realise calls are being made
3754 to other functions. */
3756 /* This is a bit tricky. We store the original stack pointer in r10
3757 as it is callee-saves. gcc doesn't allow the use of r11 for some
3758 reason. Also, we can't directly "bic" the stack pointer in thumb
3759 mode since r13 isn't an allowed register number in that context.
3760 So use r4 as a temporary, since that is about to get trashed
3761 anyway, just after each use of this macro. Side effect is we need
3762 to be very careful about any future changes, since
3763 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
3764 #define VALGRIND_ALIGN_STACK \
3767 "bic r4, r4, #7\n\t" \
3769 #define VALGRIND_RESTORE_STACK \
3772 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned long) == 4. */
3775 #define CALL_FN_W_v(lval, orig) \
3777 volatile OrigFn _orig = (orig); \
3778 volatile unsigned long _argvec[1]; \
3779 volatile unsigned long _res; \
3780 _argvec[0] = (unsigned long)_orig.nraddr; \
3782 VALGRIND_ALIGN_STACK \
3783 "ldr r4, [%1] \n\t" /* target->r4 */ \
3784 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3785 VALGRIND_RESTORE_STACK \
3787 : /*out*/ "=r" (_res) \
3788 : /*in*/ "0" (&_argvec[0]) \
3789 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3791 lval = (__typeof__(lval)) _res; \
3794 #define CALL_FN_W_W(lval, orig, arg1) \
3796 volatile OrigFn _orig = (orig); \
3797 volatile unsigned long _argvec[2]; \
3798 volatile unsigned long _res; \
3799 _argvec[0] = (unsigned long)_orig.nraddr; \
3800 _argvec[1] = (unsigned long)(arg1); \
3802 VALGRIND_ALIGN_STACK \
3803 "ldr r0, [%1, #4] \n\t" \
3804 "ldr r4, [%1] \n\t" /* target->r4 */ \
3805 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3806 VALGRIND_RESTORE_STACK \
3808 : /*out*/ "=r" (_res) \
3809 : /*in*/ "0" (&_argvec[0]) \
3810 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3812 lval = (__typeof__(lval)) _res; \
3815 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3817 volatile OrigFn _orig = (orig); \
3818 volatile unsigned long _argvec[3]; \
3819 volatile unsigned long _res; \
3820 _argvec[0] = (unsigned long)_orig.nraddr; \
3821 _argvec[1] = (unsigned long)(arg1); \
3822 _argvec[2] = (unsigned long)(arg2); \
3824 VALGRIND_ALIGN_STACK \
3825 "ldr r0, [%1, #4] \n\t" \
3826 "ldr r1, [%1, #8] \n\t" \
3827 "ldr r4, [%1] \n\t" /* target->r4 */ \
3828 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3829 VALGRIND_RESTORE_STACK \
3831 : /*out*/ "=r" (_res) \
3832 : /*in*/ "0" (&_argvec[0]) \
3833 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3835 lval = (__typeof__(lval)) _res; \
3838 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3840 volatile OrigFn _orig = (orig); \
3841 volatile unsigned long _argvec[4]; \
3842 volatile unsigned long _res; \
3843 _argvec[0] = (unsigned long)_orig.nraddr; \
3844 _argvec[1] = (unsigned long)(arg1); \
3845 _argvec[2] = (unsigned long)(arg2); \
3846 _argvec[3] = (unsigned long)(arg3); \
3848 VALGRIND_ALIGN_STACK \
3849 "ldr r0, [%1, #4] \n\t" \
3850 "ldr r1, [%1, #8] \n\t" \
3851 "ldr r2, [%1, #12] \n\t" \
3852 "ldr r4, [%1] \n\t" /* target->r4 */ \
3853 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3854 VALGRIND_RESTORE_STACK \
3856 : /*out*/ "=r" (_res) \
3857 : /*in*/ "0" (&_argvec[0]) \
3858 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3860 lval = (__typeof__(lval)) _res; \
3863 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3865 volatile OrigFn _orig = (orig); \
3866 volatile unsigned long _argvec[5]; \
3867 volatile unsigned long _res; \
3868 _argvec[0] = (unsigned long)_orig.nraddr; \
3869 _argvec[1] = (unsigned long)(arg1); \
3870 _argvec[2] = (unsigned long)(arg2); \
3871 _argvec[3] = (unsigned long)(arg3); \
3872 _argvec[4] = (unsigned long)(arg4); \
3874 VALGRIND_ALIGN_STACK \
3875 "ldr r0, [%1, #4] \n\t" \
3876 "ldr r1, [%1, #8] \n\t" \
3877 "ldr r2, [%1, #12] \n\t" \
3878 "ldr r3, [%1, #16] \n\t" \
3879 "ldr r4, [%1] \n\t" /* target->r4 */ \
3880 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3881 VALGRIND_RESTORE_STACK \
3883 : /*out*/ "=r" (_res) \
3884 : /*in*/ "0" (&_argvec[0]) \
3885 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3887 lval = (__typeof__(lval)) _res; \
3890 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3892 volatile OrigFn _orig = (orig); \
3893 volatile unsigned long _argvec[6]; \
3894 volatile unsigned long _res; \
3895 _argvec[0] = (unsigned long)_orig.nraddr; \
3896 _argvec[1] = (unsigned long)(arg1); \
3897 _argvec[2] = (unsigned long)(arg2); \
3898 _argvec[3] = (unsigned long)(arg3); \
3899 _argvec[4] = (unsigned long)(arg4); \
3900 _argvec[5] = (unsigned long)(arg5); \
3902 VALGRIND_ALIGN_STACK \
3903 "sub sp, sp, #4 \n\t" \
3904 "ldr r0, [%1, #20] \n\t" \
3906 "ldr r0, [%1, #4] \n\t" \
3907 "ldr r1, [%1, #8] \n\t" \
3908 "ldr r2, [%1, #12] \n\t" \
3909 "ldr r3, [%1, #16] \n\t" \
3910 "ldr r4, [%1] \n\t" /* target->r4 */ \
3911 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3912 VALGRIND_RESTORE_STACK \
3914 : /*out*/ "=r" (_res) \
3915 : /*in*/ "0" (&_argvec[0]) \
3916 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3918 lval = (__typeof__(lval)) _res; \
3921 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3923 volatile OrigFn _orig = (orig); \
3924 volatile unsigned long _argvec[7]; \
3925 volatile unsigned long _res; \
3926 _argvec[0] = (unsigned long)_orig.nraddr; \
3927 _argvec[1] = (unsigned long)(arg1); \
3928 _argvec[2] = (unsigned long)(arg2); \
3929 _argvec[3] = (unsigned long)(arg3); \
3930 _argvec[4] = (unsigned long)(arg4); \
3931 _argvec[5] = (unsigned long)(arg5); \
3932 _argvec[6] = (unsigned long)(arg6); \
3934 VALGRIND_ALIGN_STACK \
3935 "ldr r0, [%1, #20] \n\t" \
3936 "ldr r1, [%1, #24] \n\t" \
3937 "push {r0, r1} \n\t" \
3938 "ldr r0, [%1, #4] \n\t" \
3939 "ldr r1, [%1, #8] \n\t" \
3940 "ldr r2, [%1, #12] \n\t" \
3941 "ldr r3, [%1, #16] \n\t" \
3942 "ldr r4, [%1] \n\t" /* target->r4 */ \
3943 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3944 VALGRIND_RESTORE_STACK \
3946 : /*out*/ "=r" (_res) \
3947 : /*in*/ "0" (&_argvec[0]) \
3948 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3950 lval = (__typeof__(lval)) _res; \
3953 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3956 volatile OrigFn _orig = (orig); \
3957 volatile unsigned long _argvec[8]; \
3958 volatile unsigned long _res; \
3959 _argvec[0] = (unsigned long)_orig.nraddr; \
3960 _argvec[1] = (unsigned long)(arg1); \
3961 _argvec[2] = (unsigned long)(arg2); \
3962 _argvec[3] = (unsigned long)(arg3); \
3963 _argvec[4] = (unsigned long)(arg4); \
3964 _argvec[5] = (unsigned long)(arg5); \
3965 _argvec[6] = (unsigned long)(arg6); \
3966 _argvec[7] = (unsigned long)(arg7); \
3968 VALGRIND_ALIGN_STACK \
3969 "sub sp, sp, #4 \n\t" \
3970 "ldr r0, [%1, #20] \n\t" \
3971 "ldr r1, [%1, #24] \n\t" \
3972 "ldr r2, [%1, #28] \n\t" \
3973 "push {r0, r1, r2} \n\t" \
3974 "ldr r0, [%1, #4] \n\t" \
3975 "ldr r1, [%1, #8] \n\t" \
3976 "ldr r2, [%1, #12] \n\t" \
3977 "ldr r3, [%1, #16] \n\t" \
3978 "ldr r4, [%1] \n\t" /* target->r4 */ \
3979 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3980 VALGRIND_RESTORE_STACK \
3982 : /*out*/ "=r" (_res) \
3983 : /*in*/ "0" (&_argvec[0]) \
3984 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3986 lval = (__typeof__(lval)) _res; \
3989 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3992 volatile OrigFn _orig = (orig); \
3993 volatile unsigned long _argvec[9]; \
3994 volatile unsigned long _res; \
3995 _argvec[0] = (unsigned long)_orig.nraddr; \
3996 _argvec[1] = (unsigned long)(arg1); \
3997 _argvec[2] = (unsigned long)(arg2); \
3998 _argvec[3] = (unsigned long)(arg3); \
3999 _argvec[4] = (unsigned long)(arg4); \
4000 _argvec[5] = (unsigned long)(arg5); \
4001 _argvec[6] = (unsigned long)(arg6); \
4002 _argvec[7] = (unsigned long)(arg7); \
4003 _argvec[8] = (unsigned long)(arg8); \
4005 VALGRIND_ALIGN_STACK \
4006 "ldr r0, [%1, #20] \n\t" \
4007 "ldr r1, [%1, #24] \n\t" \
4008 "ldr r2, [%1, #28] \n\t" \
4009 "ldr r3, [%1, #32] \n\t" \
4010 "push {r0, r1, r2, r3} \n\t" \
4011 "ldr r0, [%1, #4] \n\t" \
4012 "ldr r1, [%1, #8] \n\t" \
4013 "ldr r2, [%1, #12] \n\t" \
4014 "ldr r3, [%1, #16] \n\t" \
4015 "ldr r4, [%1] \n\t" /* target->r4 */ \
4016 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4017 VALGRIND_RESTORE_STACK \
4019 : /*out*/ "=r" (_res) \
4020 : /*in*/ "0" (&_argvec[0]) \
4021 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4023 lval = (__typeof__(lval)) _res; \
4026 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4029 volatile OrigFn _orig = (orig); \
4030 volatile unsigned long _argvec[10]; \
4031 volatile unsigned long _res; \
4032 _argvec[0] = (unsigned long)_orig.nraddr; \
4033 _argvec[1] = (unsigned long)(arg1); \
4034 _argvec[2] = (unsigned long)(arg2); \
4035 _argvec[3] = (unsigned long)(arg3); \
4036 _argvec[4] = (unsigned long)(arg4); \
4037 _argvec[5] = (unsigned long)(arg5); \
4038 _argvec[6] = (unsigned long)(arg6); \
4039 _argvec[7] = (unsigned long)(arg7); \
4040 _argvec[8] = (unsigned long)(arg8); \
4041 _argvec[9] = (unsigned long)(arg9); \
4043 VALGRIND_ALIGN_STACK \
4044 "sub sp, sp, #4 \n\t" \
4045 "ldr r0, [%1, #20] \n\t" \
4046 "ldr r1, [%1, #24] \n\t" \
4047 "ldr r2, [%1, #28] \n\t" \
4048 "ldr r3, [%1, #32] \n\t" \
4049 "ldr r4, [%1, #36] \n\t" \
4050 "push {r0, r1, r2, r3, r4} \n\t" \
4051 "ldr r0, [%1, #4] \n\t" \
4052 "ldr r1, [%1, #8] \n\t" \
4053 "ldr r2, [%1, #12] \n\t" \
4054 "ldr r3, [%1, #16] \n\t" \
4055 "ldr r4, [%1] \n\t" /* target->r4 */ \
4056 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4057 VALGRIND_RESTORE_STACK \
4059 : /*out*/ "=r" (_res) \
4060 : /*in*/ "0" (&_argvec[0]) \
4061 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4063 lval = (__typeof__(lval)) _res; \
4066 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4067 arg7,arg8,arg9,arg10) \
4069 volatile OrigFn _orig = (orig); \
4070 volatile unsigned long _argvec[11]; \
4071 volatile unsigned long _res; \
4072 _argvec[0] = (unsigned long)_orig.nraddr; \
4073 _argvec[1] = (unsigned long)(arg1); \
4074 _argvec[2] = (unsigned long)(arg2); \
4075 _argvec[3] = (unsigned long)(arg3); \
4076 _argvec[4] = (unsigned long)(arg4); \
4077 _argvec[5] = (unsigned long)(arg5); \
4078 _argvec[6] = (unsigned long)(arg6); \
4079 _argvec[7] = (unsigned long)(arg7); \
4080 _argvec[8] = (unsigned long)(arg8); \
4081 _argvec[9] = (unsigned long)(arg9); \
4082 _argvec[10] = (unsigned long)(arg10); \
4084 VALGRIND_ALIGN_STACK \
4085 "ldr r0, [%1, #40] \n\t" \
4087 "ldr r0, [%1, #20] \n\t" \
4088 "ldr r1, [%1, #24] \n\t" \
4089 "ldr r2, [%1, #28] \n\t" \
4090 "ldr r3, [%1, #32] \n\t" \
4091 "ldr r4, [%1, #36] \n\t" \
4092 "push {r0, r1, r2, r3, r4} \n\t" \
4093 "ldr r0, [%1, #4] \n\t" \
4094 "ldr r1, [%1, #8] \n\t" \
4095 "ldr r2, [%1, #12] \n\t" \
4096 "ldr r3, [%1, #16] \n\t" \
4097 "ldr r4, [%1] \n\t" /* target->r4 */ \
4098 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4099 VALGRIND_RESTORE_STACK \
4101 : /*out*/ "=r" (_res) \
4102 : /*in*/ "0" (&_argvec[0]) \
4103 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4105 lval = (__typeof__(lval)) _res; \
4108 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4109 arg6,arg7,arg8,arg9,arg10, \
4112 volatile OrigFn _orig = (orig); \
4113 volatile unsigned long _argvec[12]; \
4114 volatile unsigned long _res; \
4115 _argvec[0] = (unsigned long)_orig.nraddr; \
4116 _argvec[1] = (unsigned long)(arg1); \
4117 _argvec[2] = (unsigned long)(arg2); \
4118 _argvec[3] = (unsigned long)(arg3); \
4119 _argvec[4] = (unsigned long)(arg4); \
4120 _argvec[5] = (unsigned long)(arg5); \
4121 _argvec[6] = (unsigned long)(arg6); \
4122 _argvec[7] = (unsigned long)(arg7); \
4123 _argvec[8] = (unsigned long)(arg8); \
4124 _argvec[9] = (unsigned long)(arg9); \
4125 _argvec[10] = (unsigned long)(arg10); \
4126 _argvec[11] = (unsigned long)(arg11); \
4128 VALGRIND_ALIGN_STACK \
4129 "sub sp, sp, #4 \n\t" \
4130 "ldr r0, [%1, #40] \n\t" \
4131 "ldr r1, [%1, #44] \n\t" \
4132 "push {r0, r1} \n\t" \
4133 "ldr r0, [%1, #20] \n\t" \
4134 "ldr r1, [%1, #24] \n\t" \
4135 "ldr r2, [%1, #28] \n\t" \
4136 "ldr r3, [%1, #32] \n\t" \
4137 "ldr r4, [%1, #36] \n\t" \
4138 "push {r0, r1, r2, r3, r4} \n\t" \
4139 "ldr r0, [%1, #4] \n\t" \
4140 "ldr r1, [%1, #8] \n\t" \
4141 "ldr r2, [%1, #12] \n\t" \
4142 "ldr r3, [%1, #16] \n\t" \
4143 "ldr r4, [%1] \n\t" /* target->r4 */ \
4144 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4145 VALGRIND_RESTORE_STACK \
4147 : /*out*/ "=r" (_res) \
4148 : /*in*/ "0" (&_argvec[0]) \
4149 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4151 lval = (__typeof__(lval)) _res; \
4154 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4155 arg6,arg7,arg8,arg9,arg10, \
4158 volatile OrigFn _orig = (orig); \
4159 volatile unsigned long _argvec[13]; \
4160 volatile unsigned long _res; \
4161 _argvec[0] = (unsigned long)_orig.nraddr; \
4162 _argvec[1] = (unsigned long)(arg1); \
4163 _argvec[2] = (unsigned long)(arg2); \
4164 _argvec[3] = (unsigned long)(arg3); \
4165 _argvec[4] = (unsigned long)(arg4); \
4166 _argvec[5] = (unsigned long)(arg5); \
4167 _argvec[6] = (unsigned long)(arg6); \
4168 _argvec[7] = (unsigned long)(arg7); \
4169 _argvec[8] = (unsigned long)(arg8); \
4170 _argvec[9] = (unsigned long)(arg9); \
4171 _argvec[10] = (unsigned long)(arg10); \
4172 _argvec[11] = (unsigned long)(arg11); \
4173 _argvec[12] = (unsigned long)(arg12); \
4175 VALGRIND_ALIGN_STACK \
4176 "ldr r0, [%1, #40] \n\t" \
4177 "ldr r1, [%1, #44] \n\t" \
4178 "ldr r2, [%1, #48] \n\t" \
4179 "push {r0, r1, r2} \n\t" \
4180 "ldr r0, [%1, #20] \n\t" \
4181 "ldr r1, [%1, #24] \n\t" \
4182 "ldr r2, [%1, #28] \n\t" \
4183 "ldr r3, [%1, #32] \n\t" \
4184 "ldr r4, [%1, #36] \n\t" \
4185 "push {r0, r1, r2, r3, r4} \n\t" \
4186 "ldr r0, [%1, #4] \n\t" \
4187 "ldr r1, [%1, #8] \n\t" \
4188 "ldr r2, [%1, #12] \n\t" \
4189 "ldr r3, [%1, #16] \n\t" \
4190 "ldr r4, [%1] \n\t" /* target->r4 */ \
4191 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4192 VALGRIND_RESTORE_STACK \
4194 : /*out*/ "=r" (_res) \
4195 : /*in*/ "0" (&_argvec[0]) \
4196 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4198 lval = (__typeof__(lval)) _res; \
4201 #endif /* PLAT_arm_linux */
4203 /* ------------------------ arm64-linux ------------------------ */
4205 #if defined(PLAT_arm64_linux)
4207 /* These regs are trashed by the hidden call. */
/* Register-name list spliced into the asm "trash" (clobber) section of
   the arm64-linux CALL_FN_* macros below: everything the hidden call
   may modify must be named here so GCC does not cache values in these
   registers across the asm statement.
   NOTE(review): x19/x20 are normally callee-saved under AAPCS64 but are
   listed anyway -- presumably the redirected call can trash them;
   confirm against the Valgrind function-wrapping docs. */
4208 #define __CALLER_SAVED_REGS \
4209      "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9",   \
4210      "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",      \
4211      "x18", "x19", "x20", "x30",                                  \
4212      "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",  \
4213      "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17",      \
4214      "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25",      \
4215      "v26", "v27", "v28", "v29", "v30", "v31"
4217 /* x21 is callee-saved, so we can use it to save and restore SP around the hidden call. */
4219 #define VALGRIND_ALIGN_STACK \
4221 "bic sp, x21, #15\n\t"
4222 #define VALGRIND_RESTORE_STACK \
4225 /* These CALL_FN_ macros assume that on arm64-linux,
4226 sizeof(unsigned long) == 8. */
4228 #define CALL_FN_W_v(lval, orig) \
4230 volatile OrigFn _orig = (orig); \
4231 volatile unsigned long _argvec[1]; \
4232 volatile unsigned long _res; \
4233 _argvec[0] = (unsigned long)_orig.nraddr; \
4235 VALGRIND_ALIGN_STACK \
4236 "ldr x8, [%1] \n\t" /* target->x8 */ \
4237 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4238 VALGRIND_RESTORE_STACK \
4240 : /*out*/ "=r" (_res) \
4241 : /*in*/ "0" (&_argvec[0]) \
4242 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4244 lval = (__typeof__(lval)) _res; \
4247 #define CALL_FN_W_W(lval, orig, arg1) \
4249 volatile OrigFn _orig = (orig); \
4250 volatile unsigned long _argvec[2]; \
4251 volatile unsigned long _res; \
4252 _argvec[0] = (unsigned long)_orig.nraddr; \
4253 _argvec[1] = (unsigned long)(arg1); \
4255 VALGRIND_ALIGN_STACK \
4256 "ldr x0, [%1, #8] \n\t" \
4257 "ldr x8, [%1] \n\t" /* target->x8 */ \
4258 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4259 VALGRIND_RESTORE_STACK \
4261 : /*out*/ "=r" (_res) \
4262 : /*in*/ "0" (&_argvec[0]) \
4263 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4265 lval = (__typeof__(lval)) _res; \
4268 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4270 volatile OrigFn _orig = (orig); \
4271 volatile unsigned long _argvec[3]; \
4272 volatile unsigned long _res; \
4273 _argvec[0] = (unsigned long)_orig.nraddr; \
4274 _argvec[1] = (unsigned long)(arg1); \
4275 _argvec[2] = (unsigned long)(arg2); \
4277 VALGRIND_ALIGN_STACK \
4278 "ldr x0, [%1, #8] \n\t" \
4279 "ldr x1, [%1, #16] \n\t" \
4280 "ldr x8, [%1] \n\t" /* target->x8 */ \
4281 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4282 VALGRIND_RESTORE_STACK \
4284 : /*out*/ "=r" (_res) \
4285 : /*in*/ "0" (&_argvec[0]) \
4286 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4288 lval = (__typeof__(lval)) _res; \
4291 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4293 volatile OrigFn _orig = (orig); \
4294 volatile unsigned long _argvec[4]; \
4295 volatile unsigned long _res; \
4296 _argvec[0] = (unsigned long)_orig.nraddr; \
4297 _argvec[1] = (unsigned long)(arg1); \
4298 _argvec[2] = (unsigned long)(arg2); \
4299 _argvec[3] = (unsigned long)(arg3); \
4301 VALGRIND_ALIGN_STACK \
4302 "ldr x0, [%1, #8] \n\t" \
4303 "ldr x1, [%1, #16] \n\t" \
4304 "ldr x2, [%1, #24] \n\t" \
4305 "ldr x8, [%1] \n\t" /* target->x8 */ \
4306 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4307 VALGRIND_RESTORE_STACK \
4309 : /*out*/ "=r" (_res) \
4310 : /*in*/ "0" (&_argvec[0]) \
4311 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4313 lval = (__typeof__(lval)) _res; \
4316 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4318 volatile OrigFn _orig = (orig); \
4319 volatile unsigned long _argvec[5]; \
4320 volatile unsigned long _res; \
4321 _argvec[0] = (unsigned long)_orig.nraddr; \
4322 _argvec[1] = (unsigned long)(arg1); \
4323 _argvec[2] = (unsigned long)(arg2); \
4324 _argvec[3] = (unsigned long)(arg3); \
4325 _argvec[4] = (unsigned long)(arg4); \
4327 VALGRIND_ALIGN_STACK \
4328 "ldr x0, [%1, #8] \n\t" \
4329 "ldr x1, [%1, #16] \n\t" \
4330 "ldr x2, [%1, #24] \n\t" \
4331 "ldr x3, [%1, #32] \n\t" \
4332 "ldr x8, [%1] \n\t" /* target->x8 */ \
4333 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4334 VALGRIND_RESTORE_STACK \
4336 : /*out*/ "=r" (_res) \
4337 : /*in*/ "0" (&_argvec[0]) \
4338 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4340 lval = (__typeof__(lval)) _res; \
4343 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4345 volatile OrigFn _orig = (orig); \
4346 volatile unsigned long _argvec[6]; \
4347 volatile unsigned long _res; \
4348 _argvec[0] = (unsigned long)_orig.nraddr; \
4349 _argvec[1] = (unsigned long)(arg1); \
4350 _argvec[2] = (unsigned long)(arg2); \
4351 _argvec[3] = (unsigned long)(arg3); \
4352 _argvec[4] = (unsigned long)(arg4); \
4353 _argvec[5] = (unsigned long)(arg5); \
4355 VALGRIND_ALIGN_STACK \
4356 "ldr x0, [%1, #8] \n\t" \
4357 "ldr x1, [%1, #16] \n\t" \
4358 "ldr x2, [%1, #24] \n\t" \
4359 "ldr x3, [%1, #32] \n\t" \
4360 "ldr x4, [%1, #40] \n\t" \
4361 "ldr x8, [%1] \n\t" /* target->x8 */ \
4362 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4363 VALGRIND_RESTORE_STACK \
4365 : /*out*/ "=r" (_res) \
4366 : /*in*/ "0" (&_argvec[0]) \
4367 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4369 lval = (__typeof__(lval)) _res; \
4372 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4374 volatile OrigFn _orig = (orig); \
4375 volatile unsigned long _argvec[7]; \
4376 volatile unsigned long _res; \
4377 _argvec[0] = (unsigned long)_orig.nraddr; \
4378 _argvec[1] = (unsigned long)(arg1); \
4379 _argvec[2] = (unsigned long)(arg2); \
4380 _argvec[3] = (unsigned long)(arg3); \
4381 _argvec[4] = (unsigned long)(arg4); \
4382 _argvec[5] = (unsigned long)(arg5); \
4383 _argvec[6] = (unsigned long)(arg6); \
4385 VALGRIND_ALIGN_STACK \
4386 "ldr x0, [%1, #8] \n\t" \
4387 "ldr x1, [%1, #16] \n\t" \
4388 "ldr x2, [%1, #24] \n\t" \
4389 "ldr x3, [%1, #32] \n\t" \
4390 "ldr x4, [%1, #40] \n\t" \
4391 "ldr x5, [%1, #48] \n\t" \
4392 "ldr x8, [%1] \n\t" /* target->x8 */ \
4393 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4394 VALGRIND_RESTORE_STACK \
4396 : /*out*/ "=r" (_res) \
4397 : /*in*/ "0" (&_argvec[0]) \
4398 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4400 lval = (__typeof__(lval)) _res; \
4403 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4406 volatile OrigFn _orig = (orig); \
4407 volatile unsigned long _argvec[8]; \
4408 volatile unsigned long _res; \
4409 _argvec[0] = (unsigned long)_orig.nraddr; \
4410 _argvec[1] = (unsigned long)(arg1); \
4411 _argvec[2] = (unsigned long)(arg2); \
4412 _argvec[3] = (unsigned long)(arg3); \
4413 _argvec[4] = (unsigned long)(arg4); \
4414 _argvec[5] = (unsigned long)(arg5); \
4415 _argvec[6] = (unsigned long)(arg6); \
4416 _argvec[7] = (unsigned long)(arg7); \
4418 VALGRIND_ALIGN_STACK \
4419 "ldr x0, [%1, #8] \n\t" \
4420 "ldr x1, [%1, #16] \n\t" \
4421 "ldr x2, [%1, #24] \n\t" \
4422 "ldr x3, [%1, #32] \n\t" \
4423 "ldr x4, [%1, #40] \n\t" \
4424 "ldr x5, [%1, #48] \n\t" \
4425 "ldr x6, [%1, #56] \n\t" \
4426 "ldr x8, [%1] \n\t" /* target->x8 */ \
4427 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4428 VALGRIND_RESTORE_STACK \
4430 : /*out*/ "=r" (_res) \
4431 : /*in*/ "0" (&_argvec[0]) \
4432 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4434 lval = (__typeof__(lval)) _res; \
4437 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4440 volatile OrigFn _orig = (orig); \
4441 volatile unsigned long _argvec[9]; \
4442 volatile unsigned long _res; \
4443 _argvec[0] = (unsigned long)_orig.nraddr; \
4444 _argvec[1] = (unsigned long)(arg1); \
4445 _argvec[2] = (unsigned long)(arg2); \
4446 _argvec[3] = (unsigned long)(arg3); \
4447 _argvec[4] = (unsigned long)(arg4); \
4448 _argvec[5] = (unsigned long)(arg5); \
4449 _argvec[6] = (unsigned long)(arg6); \
4450 _argvec[7] = (unsigned long)(arg7); \
4451 _argvec[8] = (unsigned long)(arg8); \
4453 VALGRIND_ALIGN_STACK \
4454 "ldr x0, [%1, #8] \n\t" \
4455 "ldr x1, [%1, #16] \n\t" \
4456 "ldr x2, [%1, #24] \n\t" \
4457 "ldr x3, [%1, #32] \n\t" \
4458 "ldr x4, [%1, #40] \n\t" \
4459 "ldr x5, [%1, #48] \n\t" \
4460 "ldr x6, [%1, #56] \n\t" \
4461 "ldr x7, [%1, #64] \n\t" \
4462 "ldr x8, [%1] \n\t" /* target->x8 */ \
4463 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4464 VALGRIND_RESTORE_STACK \
4466 : /*out*/ "=r" (_res) \
4467 : /*in*/ "0" (&_argvec[0]) \
4468 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4470 lval = (__typeof__(lval)) _res; \
4473 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4476 volatile OrigFn _orig = (orig); \
4477 volatile unsigned long _argvec[10]; \
4478 volatile unsigned long _res; \
4479 _argvec[0] = (unsigned long)_orig.nraddr; \
4480 _argvec[1] = (unsigned long)(arg1); \
4481 _argvec[2] = (unsigned long)(arg2); \
4482 _argvec[3] = (unsigned long)(arg3); \
4483 _argvec[4] = (unsigned long)(arg4); \
4484 _argvec[5] = (unsigned long)(arg5); \
4485 _argvec[6] = (unsigned long)(arg6); \
4486 _argvec[7] = (unsigned long)(arg7); \
4487 _argvec[8] = (unsigned long)(arg8); \
4488 _argvec[9] = (unsigned long)(arg9); \
4490 VALGRIND_ALIGN_STACK \
4491 "sub sp, sp, #0x20 \n\t" \
4492 "ldr x0, [%1, #8] \n\t" \
4493 "ldr x1, [%1, #16] \n\t" \
4494 "ldr x2, [%1, #24] \n\t" \
4495 "ldr x3, [%1, #32] \n\t" \
4496 "ldr x4, [%1, #40] \n\t" \
4497 "ldr x5, [%1, #48] \n\t" \
4498 "ldr x6, [%1, #56] \n\t" \
4499 "ldr x7, [%1, #64] \n\t" \
4500 "ldr x8, [%1, #72] \n\t" \
4501 "str x8, [sp, #0] \n\t" \
4502 "ldr x8, [%1] \n\t" /* target->x8 */ \
4503 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4504 VALGRIND_RESTORE_STACK \
4506 : /*out*/ "=r" (_res) \
4507 : /*in*/ "0" (&_argvec[0]) \
4508 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4510 lval = (__typeof__(lval)) _res; \
4513 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4514 arg7,arg8,arg9,arg10) \
4516 volatile OrigFn _orig = (orig); \
4517 volatile unsigned long _argvec[11]; \
4518 volatile unsigned long _res; \
4519 _argvec[0] = (unsigned long)_orig.nraddr; \
4520 _argvec[1] = (unsigned long)(arg1); \
4521 _argvec[2] = (unsigned long)(arg2); \
4522 _argvec[3] = (unsigned long)(arg3); \
4523 _argvec[4] = (unsigned long)(arg4); \
4524 _argvec[5] = (unsigned long)(arg5); \
4525 _argvec[6] = (unsigned long)(arg6); \
4526 _argvec[7] = (unsigned long)(arg7); \
4527 _argvec[8] = (unsigned long)(arg8); \
4528 _argvec[9] = (unsigned long)(arg9); \
4529 _argvec[10] = (unsigned long)(arg10); \
4531 VALGRIND_ALIGN_STACK \
4532 "sub sp, sp, #0x20 \n\t" \
4533 "ldr x0, [%1, #8] \n\t" \
4534 "ldr x1, [%1, #16] \n\t" \
4535 "ldr x2, [%1, #24] \n\t" \
4536 "ldr x3, [%1, #32] \n\t" \
4537 "ldr x4, [%1, #40] \n\t" \
4538 "ldr x5, [%1, #48] \n\t" \
4539 "ldr x6, [%1, #56] \n\t" \
4540 "ldr x7, [%1, #64] \n\t" \
4541 "ldr x8, [%1, #72] \n\t" \
4542 "str x8, [sp, #0] \n\t" \
4543 "ldr x8, [%1, #80] \n\t" \
4544 "str x8, [sp, #8] \n\t" \
4545 "ldr x8, [%1] \n\t" /* target->x8 */ \
4546 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4547 VALGRIND_RESTORE_STACK \
4549 : /*out*/ "=r" (_res) \
4550 : /*in*/ "0" (&_argvec[0]) \
4551 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4553 lval = (__typeof__(lval)) _res; \
4556 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4557 arg7,arg8,arg9,arg10,arg11) \
4559 volatile OrigFn _orig = (orig); \
4560 volatile unsigned long _argvec[12]; \
4561 volatile unsigned long _res; \
4562 _argvec[0] = (unsigned long)_orig.nraddr; \
4563 _argvec[1] = (unsigned long)(arg1); \
4564 _argvec[2] = (unsigned long)(arg2); \
4565 _argvec[3] = (unsigned long)(arg3); \
4566 _argvec[4] = (unsigned long)(arg4); \
4567 _argvec[5] = (unsigned long)(arg5); \
4568 _argvec[6] = (unsigned long)(arg6); \
4569 _argvec[7] = (unsigned long)(arg7); \
4570 _argvec[8] = (unsigned long)(arg8); \
4571 _argvec[9] = (unsigned long)(arg9); \
4572 _argvec[10] = (unsigned long)(arg10); \
4573 _argvec[11] = (unsigned long)(arg11); \
4575 VALGRIND_ALIGN_STACK \
4576 "sub sp, sp, #0x30 \n\t" \
4577 "ldr x0, [%1, #8] \n\t" \
4578 "ldr x1, [%1, #16] \n\t" \
4579 "ldr x2, [%1, #24] \n\t" \
4580 "ldr x3, [%1, #32] \n\t" \
4581 "ldr x4, [%1, #40] \n\t" \
4582 "ldr x5, [%1, #48] \n\t" \
4583 "ldr x6, [%1, #56] \n\t" \
4584 "ldr x7, [%1, #64] \n\t" \
4585 "ldr x8, [%1, #72] \n\t" \
4586 "str x8, [sp, #0] \n\t" \
4587 "ldr x8, [%1, #80] \n\t" \
4588 "str x8, [sp, #8] \n\t" \
4589 "ldr x8, [%1, #88] \n\t" \
4590 "str x8, [sp, #16] \n\t" \
4591 "ldr x8, [%1] \n\t" /* target->x8 */ \
4592 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4593 VALGRIND_RESTORE_STACK \
4595 : /*out*/ "=r" (_res) \
4596 : /*in*/ "0" (&_argvec[0]) \
4597 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4599 lval = (__typeof__(lval)) _res; \
4602 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4603 arg7,arg8,arg9,arg10,arg11, \
4606 volatile OrigFn _orig = (orig); \
4607 volatile unsigned long _argvec[13]; \
4608 volatile unsigned long _res; \
4609 _argvec[0] = (unsigned long)_orig.nraddr; \
4610 _argvec[1] = (unsigned long)(arg1); \
4611 _argvec[2] = (unsigned long)(arg2); \
4612 _argvec[3] = (unsigned long)(arg3); \
4613 _argvec[4] = (unsigned long)(arg4); \
4614 _argvec[5] = (unsigned long)(arg5); \
4615 _argvec[6] = (unsigned long)(arg6); \
4616 _argvec[7] = (unsigned long)(arg7); \
4617 _argvec[8] = (unsigned long)(arg8); \
4618 _argvec[9] = (unsigned long)(arg9); \
4619 _argvec[10] = (unsigned long)(arg10); \
4620 _argvec[11] = (unsigned long)(arg11); \
4621 _argvec[12] = (unsigned long)(arg12); \
4623 VALGRIND_ALIGN_STACK \
4624 "sub sp, sp, #0x30 \n\t" \
4625 "ldr x0, [%1, #8] \n\t" \
4626 "ldr x1, [%1, #16] \n\t" \
4627 "ldr x2, [%1, #24] \n\t" \
4628 "ldr x3, [%1, #32] \n\t" \
4629 "ldr x4, [%1, #40] \n\t" \
4630 "ldr x5, [%1, #48] \n\t" \
4631 "ldr x6, [%1, #56] \n\t" \
4632 "ldr x7, [%1, #64] \n\t" \
4633 "ldr x8, [%1, #72] \n\t" \
4634 "str x8, [sp, #0] \n\t" \
4635 "ldr x8, [%1, #80] \n\t" \
4636 "str x8, [sp, #8] \n\t" \
4637 "ldr x8, [%1, #88] \n\t" \
4638 "str x8, [sp, #16] \n\t" \
4639 "ldr x8, [%1, #96] \n\t" \
4640 "str x8, [sp, #24] \n\t" \
4641 "ldr x8, [%1] \n\t" /* target->x8 */ \
4642 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4643 VALGRIND_RESTORE_STACK \
4645 : /*out*/ "=r" (_res) \
4646 : /*in*/ "0" (&_argvec[0]) \
4647 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4649 lval = (__typeof__(lval)) _res; \
4652 #endif /* PLAT_arm64_linux */
4654 /* ------------------------- s390x-linux ------------------------- */
4656 #if defined(PLAT_s390x_linux)
4658 /* Similar workaround as amd64 (see above), but we use r11 as frame
4659 pointer and save the old r11 in r7. r11 might be used for
4660 argvec, therefore we copy argvec in r1 since r1 is clobbered
4661 after the call anyway. */
4662 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* CFI-capable build (see the #if above): expands to an extra asm input
   operand -- a GPR ("d") holding the canonical frame address from
   __builtin_dwarf_cfa().  The leading comma lets it be appended
   directly after the argvec input in the CALL_FN_* asm statements
   below (the `"a" (&_argvec[0]) __FRAME_POINTER` lines). */
4663 # define __FRAME_POINTER \
4664      ,"d"(__builtin_dwarf_cfa())
4665 # define VALGRIND_CFI_PROLOGUE \
4666 ".cfi_remember_state\n\t" \
4667 "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
4670 ".cfi_def_cfa r11, 0\n\t"
4671 # define VALGRIND_CFI_EPILOGUE \
4673 ".cfi_restore_state\n\t"
4675 # define __FRAME_POINTER
4676 # define VALGRIND_CFI_PROLOGUE \
4678 # define VALGRIND_CFI_EPILOGUE
4681 /* Nb: On s390 the stack pointer is properly aligned *at all times*
4682 according to the s390 GCC maintainer. (The ABI specification is not
4683 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4684 VALGRIND_RESTORE_STACK are not defined here. */
4686 /* These regs are trashed by the hidden call. Note that we overwrite
4687 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4688    function a proper return address. All others are ABI defined call clobbers. */
4690 #if defined(__VX__) || defined(__S390_VX__)
/* s390x clobber list, vector-facility variant (guarded by the #if on
   __VX__/__S390_VX__ above): GPRs 0-5, r14 (overwritten to hold the
   return address -- see the note above about s390_irgen_noredir), and
   the full vector register file v0-v31. */
4691 #define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
4692       "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", \
4693       "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", \
4694       "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", \
4695       "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
/* s390x clobber list, no-vector-facility variant: same GPRs 0-5 and
   r14 as above, but only the FP registers f0-f7 instead of v0-v31. */
4697 #define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
4698       "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
4701 /* Nb: Although r11 is modified in the asm snippets below (inside
4702 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4704 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4706 (2) GCC will complain that r11 cannot appear inside a clobber section,
4707        when compiled with -O -fno-omit-frame-pointer */
4710 #define CALL_FN_W_v(lval, orig) \
4712 volatile OrigFn _orig = (orig); \
4713 volatile unsigned long _argvec[1]; \
4714 volatile unsigned long _res; \
4715 _argvec[0] = (unsigned long)_orig.nraddr; \
4717 VALGRIND_CFI_PROLOGUE \
4718 "aghi 15,-160\n\t" \
4719 "lg 1, 0(1)\n\t" /* target->r1 */ \
4720 VALGRIND_CALL_NOREDIR_R1 \
4722 VALGRIND_CFI_EPILOGUE \
4724 : /*out*/ "=d" (_res) \
4725 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
4726 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4728 lval = (__typeof__(lval)) _res; \
4731 /* The call abi has the arguments in r2-r6 and stack */
4732 #define CALL_FN_W_W(lval, orig, arg1) \
4734 volatile OrigFn _orig = (orig); \
4735 volatile unsigned long _argvec[2]; \
4736 volatile unsigned long _res; \
4737 _argvec[0] = (unsigned long)_orig.nraddr; \
4738 _argvec[1] = (unsigned long)arg1; \
4740 VALGRIND_CFI_PROLOGUE \
4741 "aghi 15,-160\n\t" \
4744 VALGRIND_CALL_NOREDIR_R1 \
4746 VALGRIND_CFI_EPILOGUE \
4748 : /*out*/ "=d" (_res) \
4749 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4750 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4752 lval = (__typeof__(lval)) _res; \
4755 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
4757 volatile OrigFn _orig = (orig); \
4758 volatile unsigned long _argvec[3]; \
4759 volatile unsigned long _res; \
4760 _argvec[0] = (unsigned long)_orig.nraddr; \
4761 _argvec[1] = (unsigned long)arg1; \
4762 _argvec[2] = (unsigned long)arg2; \
4764 VALGRIND_CFI_PROLOGUE \
4765 "aghi 15,-160\n\t" \
4769 VALGRIND_CALL_NOREDIR_R1 \
4771 VALGRIND_CFI_EPILOGUE \
4773 : /*out*/ "=d" (_res) \
4774 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4775 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4777 lval = (__typeof__(lval)) _res; \
4780 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
4782 volatile OrigFn _orig = (orig); \
4783 volatile unsigned long _argvec[4]; \
4784 volatile unsigned long _res; \
4785 _argvec[0] = (unsigned long)_orig.nraddr; \
4786 _argvec[1] = (unsigned long)arg1; \
4787 _argvec[2] = (unsigned long)arg2; \
4788 _argvec[3] = (unsigned long)arg3; \
4790 VALGRIND_CFI_PROLOGUE \
4791 "aghi 15,-160\n\t" \
4796 VALGRIND_CALL_NOREDIR_R1 \
4798 VALGRIND_CFI_EPILOGUE \
4800 : /*out*/ "=d" (_res) \
4801 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4802 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4804 lval = (__typeof__(lval)) _res; \
4807 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
4809 volatile OrigFn _orig = (orig); \
4810 volatile unsigned long _argvec[5]; \
4811 volatile unsigned long _res; \
4812 _argvec[0] = (unsigned long)_orig.nraddr; \
4813 _argvec[1] = (unsigned long)arg1; \
4814 _argvec[2] = (unsigned long)arg2; \
4815 _argvec[3] = (unsigned long)arg3; \
4816 _argvec[4] = (unsigned long)arg4; \
4818 VALGRIND_CFI_PROLOGUE \
4819 "aghi 15,-160\n\t" \
4825 VALGRIND_CALL_NOREDIR_R1 \
4827 VALGRIND_CFI_EPILOGUE \
4829 : /*out*/ "=d" (_res) \
4830 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4831 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4833 lval = (__typeof__(lval)) _res; \
4836 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
4838 volatile OrigFn _orig = (orig); \
4839 volatile unsigned long _argvec[6]; \
4840 volatile unsigned long _res; \
4841 _argvec[0] = (unsigned long)_orig.nraddr; \
4842 _argvec[1] = (unsigned long)arg1; \
4843 _argvec[2] = (unsigned long)arg2; \
4844 _argvec[3] = (unsigned long)arg3; \
4845 _argvec[4] = (unsigned long)arg4; \
4846 _argvec[5] = (unsigned long)arg5; \
4848 VALGRIND_CFI_PROLOGUE \
4849 "aghi 15,-160\n\t" \
4856 VALGRIND_CALL_NOREDIR_R1 \
4858 VALGRIND_CFI_EPILOGUE \
4860 : /*out*/ "=d" (_res) \
4861 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4862 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4864 lval = (__typeof__(lval)) _res; \
4867 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4870 volatile OrigFn _orig = (orig); \
4871 volatile unsigned long _argvec[7]; \
4872 volatile unsigned long _res; \
4873 _argvec[0] = (unsigned long)_orig.nraddr; \
4874 _argvec[1] = (unsigned long)arg1; \
4875 _argvec[2] = (unsigned long)arg2; \
4876 _argvec[3] = (unsigned long)arg3; \
4877 _argvec[4] = (unsigned long)arg4; \
4878 _argvec[5] = (unsigned long)arg5; \
4879 _argvec[6] = (unsigned long)arg6; \
4881 VALGRIND_CFI_PROLOGUE \
4882 "aghi 15,-168\n\t" \
4888 "mvc 160(8,15), 48(1)\n\t" \
4890 VALGRIND_CALL_NOREDIR_R1 \
4892 VALGRIND_CFI_EPILOGUE \
4894 : /*out*/ "=d" (_res) \
4895 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4896 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4898 lval = (__typeof__(lval)) _res; \
4901 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4904 volatile OrigFn _orig = (orig); \
4905 volatile unsigned long _argvec[8]; \
4906 volatile unsigned long _res; \
4907 _argvec[0] = (unsigned long)_orig.nraddr; \
4908 _argvec[1] = (unsigned long)arg1; \
4909 _argvec[2] = (unsigned long)arg2; \
4910 _argvec[3] = (unsigned long)arg3; \
4911 _argvec[4] = (unsigned long)arg4; \
4912 _argvec[5] = (unsigned long)arg5; \
4913 _argvec[6] = (unsigned long)arg6; \
4914 _argvec[7] = (unsigned long)arg7; \
4916 VALGRIND_CFI_PROLOGUE \
4917 "aghi 15,-176\n\t" \
4923 "mvc 160(8,15), 48(1)\n\t" \
4924 "mvc 168(8,15), 56(1)\n\t" \
4926 VALGRIND_CALL_NOREDIR_R1 \
4928 VALGRIND_CFI_EPILOGUE \
4930 : /*out*/ "=d" (_res) \
4931 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4932 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4934 lval = (__typeof__(lval)) _res; \
4937 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4940 volatile OrigFn _orig = (orig); \
4941 volatile unsigned long _argvec[9]; \
4942 volatile unsigned long _res; \
4943 _argvec[0] = (unsigned long)_orig.nraddr; \
4944 _argvec[1] = (unsigned long)arg1; \
4945 _argvec[2] = (unsigned long)arg2; \
4946 _argvec[3] = (unsigned long)arg3; \
4947 _argvec[4] = (unsigned long)arg4; \
4948 _argvec[5] = (unsigned long)arg5; \
4949 _argvec[6] = (unsigned long)arg6; \
4950 _argvec[7] = (unsigned long)arg7; \
4951 _argvec[8] = (unsigned long)arg8; \
4953 VALGRIND_CFI_PROLOGUE \
4954 "aghi 15,-184\n\t" \
4960 "mvc 160(8,15), 48(1)\n\t" \
4961 "mvc 168(8,15), 56(1)\n\t" \
4962 "mvc 176(8,15), 64(1)\n\t" \
4964 VALGRIND_CALL_NOREDIR_R1 \
4966 VALGRIND_CFI_EPILOGUE \
4968 : /*out*/ "=d" (_res) \
4969 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4970 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4972 lval = (__typeof__(lval)) _res; \
4975 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4976 arg6, arg7 ,arg8, arg9) \
4978 volatile OrigFn _orig = (orig); \
4979 volatile unsigned long _argvec[10]; \
4980 volatile unsigned long _res; \
4981 _argvec[0] = (unsigned long)_orig.nraddr; \
4982 _argvec[1] = (unsigned long)arg1; \
4983 _argvec[2] = (unsigned long)arg2; \
4984 _argvec[3] = (unsigned long)arg3; \
4985 _argvec[4] = (unsigned long)arg4; \
4986 _argvec[5] = (unsigned long)arg5; \
4987 _argvec[6] = (unsigned long)arg6; \
4988 _argvec[7] = (unsigned long)arg7; \
4989 _argvec[8] = (unsigned long)arg8; \
4990 _argvec[9] = (unsigned long)arg9; \
4992 VALGRIND_CFI_PROLOGUE \
4993 "aghi 15,-192\n\t" \
4999 "mvc 160(8,15), 48(1)\n\t" \
5000 "mvc 168(8,15), 56(1)\n\t" \
5001 "mvc 176(8,15), 64(1)\n\t" \
5002 "mvc 184(8,15), 72(1)\n\t" \
5004 VALGRIND_CALL_NOREDIR_R1 \
5006 VALGRIND_CFI_EPILOGUE \
5008 : /*out*/ "=d" (_res) \
5009 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5010 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5012 lval = (__typeof__(lval)) _res; \
5015 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5016 arg6, arg7 ,arg8, arg9, arg10) \
5018 volatile OrigFn _orig = (orig); \
5019 volatile unsigned long _argvec[11]; \
5020 volatile unsigned long _res; \
5021 _argvec[0] = (unsigned long)_orig.nraddr; \
5022 _argvec[1] = (unsigned long)arg1; \
5023 _argvec[2] = (unsigned long)arg2; \
5024 _argvec[3] = (unsigned long)arg3; \
5025 _argvec[4] = (unsigned long)arg4; \
5026 _argvec[5] = (unsigned long)arg5; \
5027 _argvec[6] = (unsigned long)arg6; \
5028 _argvec[7] = (unsigned long)arg7; \
5029 _argvec[8] = (unsigned long)arg8; \
5030 _argvec[9] = (unsigned long)arg9; \
5031 _argvec[10] = (unsigned long)arg10; \
5033 VALGRIND_CFI_PROLOGUE \
5034 "aghi 15,-200\n\t" \
5040 "mvc 160(8,15), 48(1)\n\t" \
5041 "mvc 168(8,15), 56(1)\n\t" \
5042 "mvc 176(8,15), 64(1)\n\t" \
5043 "mvc 184(8,15), 72(1)\n\t" \
5044 "mvc 192(8,15), 80(1)\n\t" \
5046 VALGRIND_CALL_NOREDIR_R1 \
5048 VALGRIND_CFI_EPILOGUE \
5050 : /*out*/ "=d" (_res) \
5051 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5052 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5054 lval = (__typeof__(lval)) _res; \
5057 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5058 arg6, arg7 ,arg8, arg9, arg10, arg11) \
5060 volatile OrigFn _orig = (orig); \
5061 volatile unsigned long _argvec[12]; \
5062 volatile unsigned long _res; \
5063 _argvec[0] = (unsigned long)_orig.nraddr; \
5064 _argvec[1] = (unsigned long)arg1; \
5065 _argvec[2] = (unsigned long)arg2; \
5066 _argvec[3] = (unsigned long)arg3; \
5067 _argvec[4] = (unsigned long)arg4; \
5068 _argvec[5] = (unsigned long)arg5; \
5069 _argvec[6] = (unsigned long)arg6; \
5070 _argvec[7] = (unsigned long)arg7; \
5071 _argvec[8] = (unsigned long)arg8; \
5072 _argvec[9] = (unsigned long)arg9; \
5073 _argvec[10] = (unsigned long)arg10; \
5074 _argvec[11] = (unsigned long)arg11; \
5076 VALGRIND_CFI_PROLOGUE \
5077 "aghi 15,-208\n\t" \
5083 "mvc 160(8,15), 48(1)\n\t" \
5084 "mvc 168(8,15), 56(1)\n\t" \
5085 "mvc 176(8,15), 64(1)\n\t" \
5086 "mvc 184(8,15), 72(1)\n\t" \
5087 "mvc 192(8,15), 80(1)\n\t" \
5088 "mvc 200(8,15), 88(1)\n\t" \
5090 VALGRIND_CALL_NOREDIR_R1 \
5092 VALGRIND_CFI_EPILOGUE \
5094 : /*out*/ "=d" (_res) \
5095 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5096 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5098 lval = (__typeof__(lval)) _res; \
5101 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5102 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
5104 volatile OrigFn _orig = (orig); \
5105 volatile unsigned long _argvec[13]; \
5106 volatile unsigned long _res; \
5107 _argvec[0] = (unsigned long)_orig.nraddr; \
5108 _argvec[1] = (unsigned long)arg1; \
5109 _argvec[2] = (unsigned long)arg2; \
5110 _argvec[3] = (unsigned long)arg3; \
5111 _argvec[4] = (unsigned long)arg4; \
5112 _argvec[5] = (unsigned long)arg5; \
5113 _argvec[6] = (unsigned long)arg6; \
5114 _argvec[7] = (unsigned long)arg7; \
5115 _argvec[8] = (unsigned long)arg8; \
5116 _argvec[9] = (unsigned long)arg9; \
5117 _argvec[10] = (unsigned long)arg10; \
5118 _argvec[11] = (unsigned long)arg11; \
5119 _argvec[12] = (unsigned long)arg12; \
5121 VALGRIND_CFI_PROLOGUE \
5122 "aghi 15,-216\n\t" \
5128 "mvc 160(8,15), 48(1)\n\t" \
5129 "mvc 168(8,15), 56(1)\n\t" \
5130 "mvc 176(8,15), 64(1)\n\t" \
5131 "mvc 184(8,15), 72(1)\n\t" \
5132 "mvc 192(8,15), 80(1)\n\t" \
5133 "mvc 200(8,15), 88(1)\n\t" \
5134 "mvc 208(8,15), 96(1)\n\t" \
5136 VALGRIND_CALL_NOREDIR_R1 \
5138 VALGRIND_CFI_EPILOGUE \
5140 : /*out*/ "=d" (_res) \
5141 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5142 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5144 lval = (__typeof__(lval)) _res; \
5148 #endif /* PLAT_s390x_linux */
5150 /* ------------------------- mips32-linux ----------------------- */
5152 #if defined(PLAT_mips32_linux)
5154 /* These regs are trashed by the hidden call. */
5155 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5156 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5159 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5162 #define CALL_FN_W_v(lval, orig) \
5164 volatile OrigFn _orig = (orig); \
5165 volatile unsigned long _argvec[1]; \
5166 volatile unsigned long _res; \
5167 _argvec[0] = (unsigned long)_orig.nraddr; \
5169 "subu $29, $29, 8 \n\t" \
5170 "sw $28, 0($29) \n\t" \
5171 "sw $31, 4($29) \n\t" \
5172 "subu $29, $29, 16 \n\t" \
5173 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5174 VALGRIND_CALL_NOREDIR_T9 \
5175 "addu $29, $29, 16\n\t" \
5176 "lw $28, 0($29) \n\t" \
5177 "lw $31, 4($29) \n\t" \
5178 "addu $29, $29, 8 \n\t" \
5180 : /*out*/ "=r" (_res) \
5181 : /*in*/ "0" (&_argvec[0]) \
5182 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5184 lval = (__typeof__(lval)) _res; \
5187 #define CALL_FN_W_W(lval, orig, arg1) \
5189 volatile OrigFn _orig = (orig); \
5190 volatile unsigned long _argvec[2]; \
5191 volatile unsigned long _res; \
5192 _argvec[0] = (unsigned long)_orig.nraddr; \
5193 _argvec[1] = (unsigned long)(arg1); \
5195 "subu $29, $29, 8 \n\t" \
5196 "sw $28, 0($29) \n\t" \
5197 "sw $31, 4($29) \n\t" \
5198 "subu $29, $29, 16 \n\t" \
5199 "lw $4, 4(%1) \n\t" /* arg1*/ \
5200 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5201 VALGRIND_CALL_NOREDIR_T9 \
5202 "addu $29, $29, 16 \n\t" \
5203 "lw $28, 0($29) \n\t" \
5204 "lw $31, 4($29) \n\t" \
5205 "addu $29, $29, 8 \n\t" \
5207 : /*out*/ "=r" (_res) \
5208 : /*in*/ "0" (&_argvec[0]) \
5209 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5211 lval = (__typeof__(lval)) _res; \
5214 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5216 volatile OrigFn _orig = (orig); \
5217 volatile unsigned long _argvec[3]; \
5218 volatile unsigned long _res; \
5219 _argvec[0] = (unsigned long)_orig.nraddr; \
5220 _argvec[1] = (unsigned long)(arg1); \
5221 _argvec[2] = (unsigned long)(arg2); \
5223 "subu $29, $29, 8 \n\t" \
5224 "sw $28, 0($29) \n\t" \
5225 "sw $31, 4($29) \n\t" \
5226 "subu $29, $29, 16 \n\t" \
5227 "lw $4, 4(%1) \n\t" \
5228 "lw $5, 8(%1) \n\t" \
5229 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5230 VALGRIND_CALL_NOREDIR_T9 \
5231 "addu $29, $29, 16 \n\t" \
5232 "lw $28, 0($29) \n\t" \
5233 "lw $31, 4($29) \n\t" \
5234 "addu $29, $29, 8 \n\t" \
5236 : /*out*/ "=r" (_res) \
5237 : /*in*/ "0" (&_argvec[0]) \
5238 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5240 lval = (__typeof__(lval)) _res; \
5243 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5245 volatile OrigFn _orig = (orig); \
5246 volatile unsigned long _argvec[4]; \
5247 volatile unsigned long _res; \
5248 _argvec[0] = (unsigned long)_orig.nraddr; \
5249 _argvec[1] = (unsigned long)(arg1); \
5250 _argvec[2] = (unsigned long)(arg2); \
5251 _argvec[3] = (unsigned long)(arg3); \
5253 "subu $29, $29, 8 \n\t" \
5254 "sw $28, 0($29) \n\t" \
5255 "sw $31, 4($29) \n\t" \
5256 "subu $29, $29, 16 \n\t" \
5257 "lw $4, 4(%1) \n\t" \
5258 "lw $5, 8(%1) \n\t" \
5259 "lw $6, 12(%1) \n\t" \
5260 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5261 VALGRIND_CALL_NOREDIR_T9 \
5262 "addu $29, $29, 16 \n\t" \
5263 "lw $28, 0($29) \n\t" \
5264 "lw $31, 4($29) \n\t" \
5265 "addu $29, $29, 8 \n\t" \
5267 : /*out*/ "=r" (_res) \
5268 : /*in*/ "0" (&_argvec[0]) \
5269 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5271 lval = (__typeof__(lval)) _res; \
5274 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5276 volatile OrigFn _orig = (orig); \
5277 volatile unsigned long _argvec[5]; \
5278 volatile unsigned long _res; \
5279 _argvec[0] = (unsigned long)_orig.nraddr; \
5280 _argvec[1] = (unsigned long)(arg1); \
5281 _argvec[2] = (unsigned long)(arg2); \
5282 _argvec[3] = (unsigned long)(arg3); \
5283 _argvec[4] = (unsigned long)(arg4); \
5285 "subu $29, $29, 8 \n\t" \
5286 "sw $28, 0($29) \n\t" \
5287 "sw $31, 4($29) \n\t" \
5288 "subu $29, $29, 16 \n\t" \
5289 "lw $4, 4(%1) \n\t" \
5290 "lw $5, 8(%1) \n\t" \
5291 "lw $6, 12(%1) \n\t" \
5292 "lw $7, 16(%1) \n\t" \
5293 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5294 VALGRIND_CALL_NOREDIR_T9 \
5295 "addu $29, $29, 16 \n\t" \
5296 "lw $28, 0($29) \n\t" \
5297 "lw $31, 4($29) \n\t" \
5298 "addu $29, $29, 8 \n\t" \
5300 : /*out*/ "=r" (_res) \
5301 : /*in*/ "0" (&_argvec[0]) \
5302 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5304 lval = (__typeof__(lval)) _res; \
5307 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5309 volatile OrigFn _orig = (orig); \
5310 volatile unsigned long _argvec[6]; \
5311 volatile unsigned long _res; \
5312 _argvec[0] = (unsigned long)_orig.nraddr; \
5313 _argvec[1] = (unsigned long)(arg1); \
5314 _argvec[2] = (unsigned long)(arg2); \
5315 _argvec[3] = (unsigned long)(arg3); \
5316 _argvec[4] = (unsigned long)(arg4); \
5317 _argvec[5] = (unsigned long)(arg5); \
5319 "subu $29, $29, 8 \n\t" \
5320 "sw $28, 0($29) \n\t" \
5321 "sw $31, 4($29) \n\t" \
5322 "lw $4, 20(%1) \n\t" \
5323 "subu $29, $29, 24\n\t" \
5324 "sw $4, 16($29) \n\t" \
5325 "lw $4, 4(%1) \n\t" \
5326 "lw $5, 8(%1) \n\t" \
5327 "lw $6, 12(%1) \n\t" \
5328 "lw $7, 16(%1) \n\t" \
5329 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5330 VALGRIND_CALL_NOREDIR_T9 \
5331 "addu $29, $29, 24 \n\t" \
5332 "lw $28, 0($29) \n\t" \
5333 "lw $31, 4($29) \n\t" \
5334 "addu $29, $29, 8 \n\t" \
5336 : /*out*/ "=r" (_res) \
5337 : /*in*/ "0" (&_argvec[0]) \
5338 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5340 lval = (__typeof__(lval)) _res; \
5342 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5344 volatile OrigFn _orig = (orig); \
5345 volatile unsigned long _argvec[7]; \
5346 volatile unsigned long _res; \
5347 _argvec[0] = (unsigned long)_orig.nraddr; \
5348 _argvec[1] = (unsigned long)(arg1); \
5349 _argvec[2] = (unsigned long)(arg2); \
5350 _argvec[3] = (unsigned long)(arg3); \
5351 _argvec[4] = (unsigned long)(arg4); \
5352 _argvec[5] = (unsigned long)(arg5); \
5353 _argvec[6] = (unsigned long)(arg6); \
5355 "subu $29, $29, 8 \n\t" \
5356 "sw $28, 0($29) \n\t" \
5357 "sw $31, 4($29) \n\t" \
5358 "lw $4, 20(%1) \n\t" \
5359 "subu $29, $29, 32\n\t" \
5360 "sw $4, 16($29) \n\t" \
5361 "lw $4, 24(%1) \n\t" \
5363 "sw $4, 20($29) \n\t" \
5364 "lw $4, 4(%1) \n\t" \
5365 "lw $5, 8(%1) \n\t" \
5366 "lw $6, 12(%1) \n\t" \
5367 "lw $7, 16(%1) \n\t" \
5368 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5369 VALGRIND_CALL_NOREDIR_T9 \
5370 "addu $29, $29, 32 \n\t" \
5371 "lw $28, 0($29) \n\t" \
5372 "lw $31, 4($29) \n\t" \
5373 "addu $29, $29, 8 \n\t" \
5375 : /*out*/ "=r" (_res) \
5376 : /*in*/ "0" (&_argvec[0]) \
5377 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5379 lval = (__typeof__(lval)) _res; \
5382 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5385 volatile OrigFn _orig = (orig); \
5386 volatile unsigned long _argvec[8]; \
5387 volatile unsigned long _res; \
5388 _argvec[0] = (unsigned long)_orig.nraddr; \
5389 _argvec[1] = (unsigned long)(arg1); \
5390 _argvec[2] = (unsigned long)(arg2); \
5391 _argvec[3] = (unsigned long)(arg3); \
5392 _argvec[4] = (unsigned long)(arg4); \
5393 _argvec[5] = (unsigned long)(arg5); \
5394 _argvec[6] = (unsigned long)(arg6); \
5395 _argvec[7] = (unsigned long)(arg7); \
5397 "subu $29, $29, 8 \n\t" \
5398 "sw $28, 0($29) \n\t" \
5399 "sw $31, 4($29) \n\t" \
5400 "lw $4, 20(%1) \n\t" \
5401 "subu $29, $29, 32\n\t" \
5402 "sw $4, 16($29) \n\t" \
5403 "lw $4, 24(%1) \n\t" \
5404 "sw $4, 20($29) \n\t" \
5405 "lw $4, 28(%1) \n\t" \
5406 "sw $4, 24($29) \n\t" \
5407 "lw $4, 4(%1) \n\t" \
5408 "lw $5, 8(%1) \n\t" \
5409 "lw $6, 12(%1) \n\t" \
5410 "lw $7, 16(%1) \n\t" \
5411 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5412 VALGRIND_CALL_NOREDIR_T9 \
5413 "addu $29, $29, 32 \n\t" \
5414 "lw $28, 0($29) \n\t" \
5415 "lw $31, 4($29) \n\t" \
5416 "addu $29, $29, 8 \n\t" \
5418 : /*out*/ "=r" (_res) \
5419 : /*in*/ "0" (&_argvec[0]) \
5420 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5422 lval = (__typeof__(lval)) _res; \
5425 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5428 volatile OrigFn _orig = (orig); \
5429 volatile unsigned long _argvec[9]; \
5430 volatile unsigned long _res; \
5431 _argvec[0] = (unsigned long)_orig.nraddr; \
5432 _argvec[1] = (unsigned long)(arg1); \
5433 _argvec[2] = (unsigned long)(arg2); \
5434 _argvec[3] = (unsigned long)(arg3); \
5435 _argvec[4] = (unsigned long)(arg4); \
5436 _argvec[5] = (unsigned long)(arg5); \
5437 _argvec[6] = (unsigned long)(arg6); \
5438 _argvec[7] = (unsigned long)(arg7); \
5439 _argvec[8] = (unsigned long)(arg8); \
5441 "subu $29, $29, 8 \n\t" \
5442 "sw $28, 0($29) \n\t" \
5443 "sw $31, 4($29) \n\t" \
5444 "lw $4, 20(%1) \n\t" \
5445 "subu $29, $29, 40\n\t" \
5446 "sw $4, 16($29) \n\t" \
5447 "lw $4, 24(%1) \n\t" \
5448 "sw $4, 20($29) \n\t" \
5449 "lw $4, 28(%1) \n\t" \
5450 "sw $4, 24($29) \n\t" \
5451 "lw $4, 32(%1) \n\t" \
5452 "sw $4, 28($29) \n\t" \
5453 "lw $4, 4(%1) \n\t" \
5454 "lw $5, 8(%1) \n\t" \
5455 "lw $6, 12(%1) \n\t" \
5456 "lw $7, 16(%1) \n\t" \
5457 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5458 VALGRIND_CALL_NOREDIR_T9 \
5459 "addu $29, $29, 40 \n\t" \
5460 "lw $28, 0($29) \n\t" \
5461 "lw $31, 4($29) \n\t" \
5462 "addu $29, $29, 8 \n\t" \
5464 : /*out*/ "=r" (_res) \
5465 : /*in*/ "0" (&_argvec[0]) \
5466 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5468 lval = (__typeof__(lval)) _res; \
5471 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5474 volatile OrigFn _orig = (orig); \
5475 volatile unsigned long _argvec[10]; \
5476 volatile unsigned long _res; \
5477 _argvec[0] = (unsigned long)_orig.nraddr; \
5478 _argvec[1] = (unsigned long)(arg1); \
5479 _argvec[2] = (unsigned long)(arg2); \
5480 _argvec[3] = (unsigned long)(arg3); \
5481 _argvec[4] = (unsigned long)(arg4); \
5482 _argvec[5] = (unsigned long)(arg5); \
5483 _argvec[6] = (unsigned long)(arg6); \
5484 _argvec[7] = (unsigned long)(arg7); \
5485 _argvec[8] = (unsigned long)(arg8); \
5486 _argvec[9] = (unsigned long)(arg9); \
5488 "subu $29, $29, 8 \n\t" \
5489 "sw $28, 0($29) \n\t" \
5490 "sw $31, 4($29) \n\t" \
5491 "lw $4, 20(%1) \n\t" \
5492 "subu $29, $29, 40\n\t" \
5493 "sw $4, 16($29) \n\t" \
5494 "lw $4, 24(%1) \n\t" \
5495 "sw $4, 20($29) \n\t" \
5496 "lw $4, 28(%1) \n\t" \
5497 "sw $4, 24($29) \n\t" \
5498 "lw $4, 32(%1) \n\t" \
5499 "sw $4, 28($29) \n\t" \
5500 "lw $4, 36(%1) \n\t" \
5501 "sw $4, 32($29) \n\t" \
5502 "lw $4, 4(%1) \n\t" \
5503 "lw $5, 8(%1) \n\t" \
5504 "lw $6, 12(%1) \n\t" \
5505 "lw $7, 16(%1) \n\t" \
5506 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5507 VALGRIND_CALL_NOREDIR_T9 \
5508 "addu $29, $29, 40 \n\t" \
5509 "lw $28, 0($29) \n\t" \
5510 "lw $31, 4($29) \n\t" \
5511 "addu $29, $29, 8 \n\t" \
5513 : /*out*/ "=r" (_res) \
5514 : /*in*/ "0" (&_argvec[0]) \
5515 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5517 lval = (__typeof__(lval)) _res; \
5520 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5521 arg7,arg8,arg9,arg10) \
5523 volatile OrigFn _orig = (orig); \
5524 volatile unsigned long _argvec[11]; \
5525 volatile unsigned long _res; \
5526 _argvec[0] = (unsigned long)_orig.nraddr; \
5527 _argvec[1] = (unsigned long)(arg1); \
5528 _argvec[2] = (unsigned long)(arg2); \
5529 _argvec[3] = (unsigned long)(arg3); \
5530 _argvec[4] = (unsigned long)(arg4); \
5531 _argvec[5] = (unsigned long)(arg5); \
5532 _argvec[6] = (unsigned long)(arg6); \
5533 _argvec[7] = (unsigned long)(arg7); \
5534 _argvec[8] = (unsigned long)(arg8); \
5535 _argvec[9] = (unsigned long)(arg9); \
5536 _argvec[10] = (unsigned long)(arg10); \
5538 "subu $29, $29, 8 \n\t" \
5539 "sw $28, 0($29) \n\t" \
5540 "sw $31, 4($29) \n\t" \
5541 "lw $4, 20(%1) \n\t" \
5542 "subu $29, $29, 48\n\t" \
5543 "sw $4, 16($29) \n\t" \
5544 "lw $4, 24(%1) \n\t" \
5545 "sw $4, 20($29) \n\t" \
5546 "lw $4, 28(%1) \n\t" \
5547 "sw $4, 24($29) \n\t" \
5548 "lw $4, 32(%1) \n\t" \
5549 "sw $4, 28($29) \n\t" \
5550 "lw $4, 36(%1) \n\t" \
5551 "sw $4, 32($29) \n\t" \
5552 "lw $4, 40(%1) \n\t" \
5553 "sw $4, 36($29) \n\t" \
5554 "lw $4, 4(%1) \n\t" \
5555 "lw $5, 8(%1) \n\t" \
5556 "lw $6, 12(%1) \n\t" \
5557 "lw $7, 16(%1) \n\t" \
5558 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5559 VALGRIND_CALL_NOREDIR_T9 \
5560 "addu $29, $29, 48 \n\t" \
5561 "lw $28, 0($29) \n\t" \
5562 "lw $31, 4($29) \n\t" \
5563 "addu $29, $29, 8 \n\t" \
5565 : /*out*/ "=r" (_res) \
5566 : /*in*/ "0" (&_argvec[0]) \
5567 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5569 lval = (__typeof__(lval)) _res; \
5572 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5573 arg6,arg7,arg8,arg9,arg10, \
5576 volatile OrigFn _orig = (orig); \
5577 volatile unsigned long _argvec[12]; \
5578 volatile unsigned long _res; \
5579 _argvec[0] = (unsigned long)_orig.nraddr; \
5580 _argvec[1] = (unsigned long)(arg1); \
5581 _argvec[2] = (unsigned long)(arg2); \
5582 _argvec[3] = (unsigned long)(arg3); \
5583 _argvec[4] = (unsigned long)(arg4); \
5584 _argvec[5] = (unsigned long)(arg5); \
5585 _argvec[6] = (unsigned long)(arg6); \
5586 _argvec[7] = (unsigned long)(arg7); \
5587 _argvec[8] = (unsigned long)(arg8); \
5588 _argvec[9] = (unsigned long)(arg9); \
5589 _argvec[10] = (unsigned long)(arg10); \
5590 _argvec[11] = (unsigned long)(arg11); \
5592 "subu $29, $29, 8 \n\t" \
5593 "sw $28, 0($29) \n\t" \
5594 "sw $31, 4($29) \n\t" \
5595 "lw $4, 20(%1) \n\t" \
5596 "subu $29, $29, 48\n\t" \
5597 "sw $4, 16($29) \n\t" \
5598 "lw $4, 24(%1) \n\t" \
5599 "sw $4, 20($29) \n\t" \
5600 "lw $4, 28(%1) \n\t" \
5601 "sw $4, 24($29) \n\t" \
5602 "lw $4, 32(%1) \n\t" \
5603 "sw $4, 28($29) \n\t" \
5604 "lw $4, 36(%1) \n\t" \
5605 "sw $4, 32($29) \n\t" \
5606 "lw $4, 40(%1) \n\t" \
5607 "sw $4, 36($29) \n\t" \
5608 "lw $4, 44(%1) \n\t" \
5609 "sw $4, 40($29) \n\t" \
5610 "lw $4, 4(%1) \n\t" \
5611 "lw $5, 8(%1) \n\t" \
5612 "lw $6, 12(%1) \n\t" \
5613 "lw $7, 16(%1) \n\t" \
5614 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5615 VALGRIND_CALL_NOREDIR_T9 \
5616 "addu $29, $29, 48 \n\t" \
5617 "lw $28, 0($29) \n\t" \
5618 "lw $31, 4($29) \n\t" \
5619 "addu $29, $29, 8 \n\t" \
5621 : /*out*/ "=r" (_res) \
5622 : /*in*/ "0" (&_argvec[0]) \
5623 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5625 lval = (__typeof__(lval)) _res; \
5628 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5629 arg6,arg7,arg8,arg9,arg10, \
5632 volatile OrigFn _orig = (orig); \
5633 volatile unsigned long _argvec[13]; \
5634 volatile unsigned long _res; \
5635 _argvec[0] = (unsigned long)_orig.nraddr; \
5636 _argvec[1] = (unsigned long)(arg1); \
5637 _argvec[2] = (unsigned long)(arg2); \
5638 _argvec[3] = (unsigned long)(arg3); \
5639 _argvec[4] = (unsigned long)(arg4); \
5640 _argvec[5] = (unsigned long)(arg5); \
5641 _argvec[6] = (unsigned long)(arg6); \
5642 _argvec[7] = (unsigned long)(arg7); \
5643 _argvec[8] = (unsigned long)(arg8); \
5644 _argvec[9] = (unsigned long)(arg9); \
5645 _argvec[10] = (unsigned long)(arg10); \
5646 _argvec[11] = (unsigned long)(arg11); \
5647 _argvec[12] = (unsigned long)(arg12); \
5649 "subu $29, $29, 8 \n\t" \
5650 "sw $28, 0($29) \n\t" \
5651 "sw $31, 4($29) \n\t" \
5652 "lw $4, 20(%1) \n\t" \
5653 "subu $29, $29, 56\n\t" \
5654 "sw $4, 16($29) \n\t" \
5655 "lw $4, 24(%1) \n\t" \
5656 "sw $4, 20($29) \n\t" \
5657 "lw $4, 28(%1) \n\t" \
5658 "sw $4, 24($29) \n\t" \
5659 "lw $4, 32(%1) \n\t" \
5660 "sw $4, 28($29) \n\t" \
5661 "lw $4, 36(%1) \n\t" \
5662 "sw $4, 32($29) \n\t" \
5663 "lw $4, 40(%1) \n\t" \
5664 "sw $4, 36($29) \n\t" \
5665 "lw $4, 44(%1) \n\t" \
5666 "sw $4, 40($29) \n\t" \
5667 "lw $4, 48(%1) \n\t" \
5668 "sw $4, 44($29) \n\t" \
5669 "lw $4, 4(%1) \n\t" \
5670 "lw $5, 8(%1) \n\t" \
5671 "lw $6, 12(%1) \n\t" \
5672 "lw $7, 16(%1) \n\t" \
5673 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5674 VALGRIND_CALL_NOREDIR_T9 \
5675 "addu $29, $29, 56 \n\t" \
5676 "lw $28, 0($29) \n\t" \
5677 "lw $31, 4($29) \n\t" \
5678 "addu $29, $29, 8 \n\t" \
5680 : /*out*/ "=r" (_res) \
5681 : /*in*/ "r" (&_argvec[0]) \
5682 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5684 lval = (__typeof__(lval)) _res; \
5687 #endif /* PLAT_mips32_linux */
5689 /* ------------------------- mips64-linux ------------------------- */
5691 #if defined(PLAT_mips64_linux)
5693 /* These regs are trashed by the hidden call. */
5694 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5695 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5698 /* These CALL_FN_ macros assume that on mips64-linux,
5699 sizeof(long long) == 8. */
/* Convert an argument value to the 64-bit register image used by the
   mips64 call stubs: cast to 'long' first (a no-op on n64; truncates /
   sign-extends appropriately where 'long' is narrower), then widen to
   'long long'.  The argument is parenthesised so that expressions such
   as 'a + b' are cast as a whole, not just their first operand. */
#define MIPS64_LONG2REG_CAST(x) ((long long)(long)(x))
5703 #define CALL_FN_W_v(lval, orig) \
5705 volatile OrigFn _orig = (orig); \
5706 volatile unsigned long long _argvec[1]; \
5707 volatile unsigned long long _res; \
5708 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
5710 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5711 VALGRIND_CALL_NOREDIR_T9 \
5713 : /*out*/ "=r" (_res) \
5714 : /*in*/ "0" (&_argvec[0]) \
5715 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5717 lval = (__typeof__(lval)) (long)_res; \
5720 #define CALL_FN_W_W(lval, orig, arg1) \
5722 volatile OrigFn _orig = (orig); \
5723 volatile unsigned long long _argvec[2]; \
5724 volatile unsigned long long _res; \
5725 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
5726 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
5728 "ld $4, 8(%1)\n\t" /* arg1*/ \
5729 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5730 VALGRIND_CALL_NOREDIR_T9 \
5732 : /*out*/ "=r" (_res) \
5733 : /*in*/ "r" (&_argvec[0]) \
5734 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5736 lval = (__typeof__(lval)) (long)_res; \
5739 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5741 volatile OrigFn _orig = (orig); \
5742 volatile unsigned long long _argvec[3]; \
5743 volatile unsigned long long _res; \
5744 _argvec[0] = _orig.nraddr; \
5745 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
5746 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
5748 "ld $4, 8(%1)\n\t" \
5749 "ld $5, 16(%1)\n\t" \
5750 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5751 VALGRIND_CALL_NOREDIR_T9 \
5753 : /*out*/ "=r" (_res) \
5754 : /*in*/ "r" (&_argvec[0]) \
5755 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5757 lval = (__typeof__(lval)) (long)_res; \
5761 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5763 volatile OrigFn _orig = (orig); \
5764 volatile unsigned long long _argvec[4]; \
5765 volatile unsigned long long _res; \
5766 _argvec[0] = _orig.nraddr; \
5767 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
5768 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
5769 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
5771 "ld $4, 8(%1)\n\t" \
5772 "ld $5, 16(%1)\n\t" \
5773 "ld $6, 24(%1)\n\t" \
5774 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5775 VALGRIND_CALL_NOREDIR_T9 \
5777 : /*out*/ "=r" (_res) \
5778 : /*in*/ "r" (&_argvec[0]) \
5779 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5781 lval = (__typeof__(lval)) (long)_res; \
5784 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5786 volatile OrigFn _orig = (orig); \
5787 volatile unsigned long long _argvec[5]; \
5788 volatile unsigned long long _res; \
5789 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
5790 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
5791 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
5792 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
5793 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
5795 "ld $4, 8(%1)\n\t" \
5796 "ld $5, 16(%1)\n\t" \
5797 "ld $6, 24(%1)\n\t" \
5798 "ld $7, 32(%1)\n\t" \
5799 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5800 VALGRIND_CALL_NOREDIR_T9 \
5802 : /*out*/ "=r" (_res) \
5803 : /*in*/ "r" (&_argvec[0]) \
5804 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5806 lval = (__typeof__(lval)) (long)_res; \
5809 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5811 volatile OrigFn _orig = (orig); \
5812 volatile unsigned long long _argvec[6]; \
5813 volatile unsigned long long _res; \
5814 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
5815 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
5816 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
5817 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
5818 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
5819 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
5821 "ld $4, 8(%1)\n\t" \
5822 "ld $5, 16(%1)\n\t" \
5823 "ld $6, 24(%1)\n\t" \
5824 "ld $7, 32(%1)\n\t" \
5825 "ld $8, 40(%1)\n\t" \
5826 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5827 VALGRIND_CALL_NOREDIR_T9 \
5829 : /*out*/ "=r" (_res) \
5830 : /*in*/ "r" (&_argvec[0]) \
5831 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5833 lval = (__typeof__(lval)) (long)_res; \
5836 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5838 volatile OrigFn _orig = (orig); \
5839 volatile unsigned long long _argvec[7]; \
5840 volatile unsigned long long _res; \
5841 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
5842 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
5843 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
5844 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
5845 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
5846 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
5847 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
5849 "ld $4, 8(%1)\n\t" \
5850 "ld $5, 16(%1)\n\t" \
5851 "ld $6, 24(%1)\n\t" \
5852 "ld $7, 32(%1)\n\t" \
5853 "ld $8, 40(%1)\n\t" \
5854 "ld $9, 48(%1)\n\t" \
5855 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5856 VALGRIND_CALL_NOREDIR_T9 \
5858 : /*out*/ "=r" (_res) \
5859 : /*in*/ "r" (&_argvec[0]) \
5860 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5862 lval = (__typeof__(lval)) (long)_res; \
5865 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5868 volatile OrigFn _orig = (orig); \
5869 volatile unsigned long long _argvec[8]; \
5870 volatile unsigned long long _res; \
5871 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
5872 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
5873 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
5874 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
5875 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
5876 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
5877 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
5878 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
5880 "ld $4, 8(%1)\n\t" \
5881 "ld $5, 16(%1)\n\t" \
5882 "ld $6, 24(%1)\n\t" \
5883 "ld $7, 32(%1)\n\t" \
5884 "ld $8, 40(%1)\n\t" \
5885 "ld $9, 48(%1)\n\t" \
5886 "ld $10, 56(%1)\n\t" \
5887 "ld $25, 0(%1) \n\t" /* target->t9 */ \
5888 VALGRIND_CALL_NOREDIR_T9 \
5890 : /*out*/ "=r" (_res) \
5891 : /*in*/ "r" (&_argvec[0]) \
5892 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5894 lval = (__typeof__(lval)) (long)_res; \
5897 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5900 volatile OrigFn _orig = (orig); \
5901 volatile unsigned long long _argvec[9]; \
5902 volatile unsigned long long _res; \
5903 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
5904 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
5905 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
5906 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
5907 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
5908 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
5909 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
5910 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
5911 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
5913 "ld $4, 8(%1)\n\t" \
5914 "ld $5, 16(%1)\n\t" \
5915 "ld $6, 24(%1)\n\t" \
5916 "ld $7, 32(%1)\n\t" \
5917 "ld $8, 40(%1)\n\t" \
5918 "ld $9, 48(%1)\n\t" \
5919 "ld $10, 56(%1)\n\t" \
5920 "ld $11, 64(%1)\n\t" \
5921 "ld $25, 0(%1) \n\t" /* target->t9 */ \
5922 VALGRIND_CALL_NOREDIR_T9 \
5924 : /*out*/ "=r" (_res) \
5925 : /*in*/ "r" (&_argvec[0]) \
5926 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5928 lval = (__typeof__(lval)) (long)_res; \
5931 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5934 volatile OrigFn _orig = (orig); \
5935 volatile unsigned long long _argvec[10]; \
5936 volatile unsigned long long _res; \
5937 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
5938 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
5939 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
5940 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
5941 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
5942 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
5943 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
5944 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
5945 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
5946 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
5948 "dsubu $29, $29, 8\n\t" \
5949 "ld $4, 72(%1)\n\t" \
5950 "sd $4, 0($29)\n\t" \
5951 "ld $4, 8(%1)\n\t" \
5952 "ld $5, 16(%1)\n\t" \
5953 "ld $6, 24(%1)\n\t" \
5954 "ld $7, 32(%1)\n\t" \
5955 "ld $8, 40(%1)\n\t" \
5956 "ld $9, 48(%1)\n\t" \
5957 "ld $10, 56(%1)\n\t" \
5958 "ld $11, 64(%1)\n\t" \
5959 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5960 VALGRIND_CALL_NOREDIR_T9 \
5961 "daddu $29, $29, 8\n\t" \
5963 : /*out*/ "=r" (_res) \
5964 : /*in*/ "r" (&_argvec[0]) \
5965 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5967 lval = (__typeof__(lval)) (long)_res; \
5970 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5971 arg7,arg8,arg9,arg10) \
5973 volatile OrigFn _orig = (orig); \
5974 volatile unsigned long long _argvec[11]; \
5975 volatile unsigned long long _res; \
5976 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
5977 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
5978 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
5979 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
5980 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
5981 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
5982 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
5983 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
5984 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
5985 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
5986 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
5988 "dsubu $29, $29, 16\n\t" \
5989 "ld $4, 72(%1)\n\t" \
5990 "sd $4, 0($29)\n\t" \
5991 "ld $4, 80(%1)\n\t" \
5992 "sd $4, 8($29)\n\t" \
5993 "ld $4, 8(%1)\n\t" \
5994 "ld $5, 16(%1)\n\t" \
5995 "ld $6, 24(%1)\n\t" \
5996 "ld $7, 32(%1)\n\t" \
5997 "ld $8, 40(%1)\n\t" \
5998 "ld $9, 48(%1)\n\t" \
5999 "ld $10, 56(%1)\n\t" \
6000 "ld $11, 64(%1)\n\t" \
6001 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6002 VALGRIND_CALL_NOREDIR_T9 \
6003 "daddu $29, $29, 16\n\t" \
6005 : /*out*/ "=r" (_res) \
6006 : /*in*/ "r" (&_argvec[0]) \
6007 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6009 lval = (__typeof__(lval)) (long)_res; \
6012 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6013 arg6,arg7,arg8,arg9,arg10, \
6016 volatile OrigFn _orig = (orig); \
6017 volatile unsigned long long _argvec[12]; \
6018 volatile unsigned long long _res; \
6019 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6020 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6021 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6022 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6023 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6024 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6025 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6026 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6027 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6028 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6029 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6030 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
6032 "dsubu $29, $29, 24\n\t" \
6033 "ld $4, 72(%1)\n\t" \
6034 "sd $4, 0($29)\n\t" \
6035 "ld $4, 80(%1)\n\t" \
6036 "sd $4, 8($29)\n\t" \
6037 "ld $4, 88(%1)\n\t" \
6038 "sd $4, 16($29)\n\t" \
6039 "ld $4, 8(%1)\n\t" \
6040 "ld $5, 16(%1)\n\t" \
6041 "ld $6, 24(%1)\n\t" \
6042 "ld $7, 32(%1)\n\t" \
6043 "ld $8, 40(%1)\n\t" \
6044 "ld $9, 48(%1)\n\t" \
6045 "ld $10, 56(%1)\n\t" \
6046 "ld $11, 64(%1)\n\t" \
6047 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6048 VALGRIND_CALL_NOREDIR_T9 \
6049 "daddu $29, $29, 24\n\t" \
6051 : /*out*/ "=r" (_res) \
6052 : /*in*/ "r" (&_argvec[0]) \
6053 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6055 lval = (__typeof__(lval)) (long)_res; \
6058 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6059 arg6,arg7,arg8,arg9,arg10, \
6062 volatile OrigFn _orig = (orig); \
6063 volatile unsigned long long _argvec[13]; \
6064 volatile unsigned long long _res; \
6065 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6066 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6067 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6068 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6069 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6070 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6071 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6072 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6073 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6074 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6075 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6076 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
6077 _argvec[12] = MIPS64_LONG2REG_CAST(arg12); \
6079 "dsubu $29, $29, 32\n\t" \
6080 "ld $4, 72(%1)\n\t" \
6081 "sd $4, 0($29)\n\t" \
6082 "ld $4, 80(%1)\n\t" \
6083 "sd $4, 8($29)\n\t" \
6084 "ld $4, 88(%1)\n\t" \
6085 "sd $4, 16($29)\n\t" \
6086 "ld $4, 96(%1)\n\t" \
6087 "sd $4, 24($29)\n\t" \
6088 "ld $4, 8(%1)\n\t" \
6089 "ld $5, 16(%1)\n\t" \
6090 "ld $6, 24(%1)\n\t" \
6091 "ld $7, 32(%1)\n\t" \
6092 "ld $8, 40(%1)\n\t" \
6093 "ld $9, 48(%1)\n\t" \
6094 "ld $10, 56(%1)\n\t" \
6095 "ld $11, 64(%1)\n\t" \
6096 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6097 VALGRIND_CALL_NOREDIR_T9 \
6098 "daddu $29, $29, 32\n\t" \
6100 : /*out*/ "=r" (_res) \
6101 : /*in*/ "r" (&_argvec[0]) \
6102 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6104 lval = (__typeof__(lval)) (long)_res; \
6107 #endif /* PLAT_mips64_linux */
6109 /* ------------------------------------------------------------------ */
6110 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6112 /* ------------------------------------------------------------------ */
6114 /* Some request codes. There are many more of these, but most are not
6115 exposed to end-user view. These are the public ones, all of the
6116 form 0x1000 + small_number.
6118 Core ones are in the range 0x00000000--0x0000ffff. The non-public
6119 ones start at 0x2000.
6122 /* These macros are used by tools -- they must be public, but don't
6123 embed them into other programs. */
/* Build the 32-bit user-request base code for a tool from its
   two-character identifier, e.g. ('M','C') for memcheck -> 0x4d430000.
   Each character lands in one of the top two bytes.  The operands are
   converted to 'unsigned int' before shifting: left-shifting a plain
   (signed) int byte value >= 0x80 by 24 would shift into the sign bit,
   which is undefined behaviour in C (C11 6.5.7).  The produced bit
   pattern is identical to the previous definition for all inputs. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)((((unsigned int)(a) & 0xff) << 24) \
                   | (((unsigned int)(b) & 0xff) << 16)))
/* True iff request code 'v' belongs to the tool identified by (a,b):
   only the top 16 bits of 'v' are compared against the tool's base. */
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
   This enum comprises an ABI exported by Valgrind to programs
   which use client requests.  DO NOT CHANGE THE NUMERIC VALUES OF THESE
   ENTRIES, NOR DELETE ANY -- add new ones at the end of the most
   relevant group. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK    = 0x1301,
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK      = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL      = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL     = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC       = 0x1305,
          VG_USERREQ__MEMPOOL_FREE        = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM        = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL        = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE      = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS      = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF           = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Some requests used for Valgrind internal, such as
             self-test or self-hosting. */
          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
          /* Used by Inner Valgrind to inform Outer Valgrind where to
             find the list of inner guest threads */
          VG_USERREQ__INNER_THREADS    = 0x1902
   } Vg_ClientRequest;
/* Non-GNU compilers do not know the GCC __extension__ keyword;
   make it expand to nothing for them. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
/* Returns the number of Valgrinds this code is running under.  That
   is, 0 if running natively, 1 if running under Valgrind, 2 if
   running under Valgrind which is running under another Valgrind,
   etc. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)
/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
   _qzz_len - 1].  Useful if you are debugging a JITter or some such,
   since it provides a way to make sure valgrind will retranslate the
   invalidated area.  Returns no value. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS,  \
                                    _qzz_addr, _qzz_len, 0, 0, 0)
/* Used by an Inner Valgrind to inform the Outer Valgrind where to
   find the list of inner guest threads (see VG_USERREQ__INNER_THREADS). */
#define VALGRIND_INNER_THREADS(_qzz_addr)                               \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS,           \
                                   _qzz_addr, 0, 0, 0, 0)
6240 /* These requests are for getting Valgrind itself to print something.
6241 Possibly with a backtrace. This is a really ugly hack. The return value
6242 is the number of characters printed, excluding the "**<pid>** " part at the
6243 start and the backtrace (if present). */
6245 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6246 /* Modern GCC will optimize the static routine out if unused,
6247 and unused attribute will shut down warnings about it. */
6248 static int VALGRIND_PRINTF(const char *format
, ...)
6249 __attribute__((format(__printf__
, 1, 2), __unused__
));
6252 #if defined(_MSC_VER)
6255 VALGRIND_PRINTF(const char *format
, ...)
6257 #if defined(NVALGRIND)
6260 #else /* NVALGRIND */
6261 #if defined(_MSC_VER) || defined(__MINGW64__)
6264 unsigned long _qzz_res
;
6267 va_start(vargs
, format
);
6268 #if defined(_MSC_VER) || defined(__MINGW64__)
6269 _qzz_res
= VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6270 VG_USERREQ__PRINTF_VALIST_BY_REF
,
6275 _qzz_res
= VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6276 VG_USERREQ__PRINTF_VALIST_BY_REF
,
6277 (unsigned long)format
,
6278 (unsigned long)&vargs
,
6282 return (int)_qzz_res
;
6283 #endif /* NVALGRIND */
6286 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6287 static int VALGRIND_PRINTF_BACKTRACE(const char *format
, ...)
6288 __attribute__((format(__printf__
, 1, 2), __unused__
));
6291 #if defined(_MSC_VER)
6294 VALGRIND_PRINTF_BACKTRACE(const char *format
, ...)
6296 #if defined(NVALGRIND)
6299 #else /* NVALGRIND */
6300 #if defined(_MSC_VER) || defined(__MINGW64__)
6303 unsigned long _qzz_res
;
6306 va_start(vargs
, format
);
6307 #if defined(_MSC_VER) || defined(__MINGW64__)
6308 _qzz_res
= VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6309 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF
,
6314 _qzz_res
= VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6315 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF
,
6316 (unsigned long)format
,
6317 (unsigned long)&vargs
,
6321 return (int)_qzz_res
;
6322 #endif /* NVALGRIND */
/* These requests allow control to move from the simulated CPU to the
   real CPU, calling an arbitrary function.

   Note that the current ThreadId is inserted as the first argument.
   So this call:

     VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)

   requires f to have this signature:

     Word f(Word tid, Word arg1, Word arg2)

   where "Word" is a word-sized type.

   Note that these client requests are not entirely reliable.  For example,
   if you call a function with them that subsequently calls printf(),
   there's a high chance Valgrind will crash.  Generally, your prospects of
   these working are made higher if the called function does not refer to
   any global variables, and does not refer to any libc or other functions
   (printf et al).  Any kind of entanglement with libc or dynamic linking is
   likely to have a bad outcome, for tricky reasons which we've grappled
   with a lot in the past.
*/
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)
/* Run _qyy_fn(tid, _qyy_arg1) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)
/* Run _qyy_fn(tid, _qyy_arg1, _qyy_arg2) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)
/* Run _qyy_fn(tid, _qyy_arg1, _qyy_arg2, _qyy_arg3) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL3,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2,         \
                                    _qyy_arg3, 0)
/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted. */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
/* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
   when heap blocks are allocated in order to give accurate results.  This
   happens automatically for the standard allocator functions such as
   malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
   delete[], etc.

   But if your program uses a custom allocator, this doesn't automatically
   happen, and Valgrind will not do as well.  For example, if you allocate
   superblocks with mmap() and then allocates chunks of the superblocks, all
   Valgrind's observations will be at the mmap() level and it won't know that
   the chunks should be considered separate entities.  In Memcheck's case,
   that means you probably won't get heap block overrun detection (because
   there won't be redzones marked as unaddressable) and you definitely won't
   get any leak detection.

   The following client requests allow a custom allocator to be annotated so
   that it can be handled accurately by Valgrind.

   VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
   by a malloc()-like function.  For Memcheck (an illustrative case), this
   does two things:

   - It records that the block has been allocated.  This means any addresses
     within the block mentioned in error messages will be
     identified as belonging to the block.  It also means that if the block
     isn't freed it will be detected by the leak checker.

   - It marks the block as being addressable and undefined (if 'is_zeroed' is
     not set), or addressable and defined (if 'is_zeroed' is set).  This
     controls how accesses to the block by the program are handled.

   'addr' is the start of the usable block (ie. after any
   redzone), 'sizeB' is its size.  'rzB' is the redzone size if the allocator
   can apply redzones -- these are blocks of padding at the start and end of
   each block.  Adding redzones is recommended as it makes it much more likely
   Valgrind will spot block overruns.  `is_zeroed' indicates if the memory is
   zeroed (or filled with another predictable value), as is the case for
   calloc().

   VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
   heap block -- that will be used by the client program -- is allocated.
   It's best to put it at the outermost level of the allocator if possible;
   for example, if you have a function my_alloc() which calls
   internal_alloc(), and the client request is put inside internal_alloc(),
   stack traces relating to the heap block will contain entries for both
   my_alloc() and internal_alloc(), which is probably not what you want.

   For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
   custom blocks from within a heap block, B, that has been allocated with
   malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
   -- the custom blocks will take precedence.

   VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK.  For
   Memcheck, it does two things:

   - It records that the block has been deallocated.  This assumes that the
     block was annotated as having been allocated via
     VALGRIND_MALLOCLIKE_BLOCK.  Otherwise, an error will be issued.

   - It marks the block as being unaddressable.

   VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
   heap block is deallocated.

   VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation.  For
   Memcheck, it does four things:

   - It records that the size of a block has been changed.  This assumes that
     the block was annotated as having been allocated via
     VALGRIND_MALLOCLIKE_BLOCK.  Otherwise, an error will be issued.

   - If the block shrunk, it marks the freed memory as being unaddressable.

   - If the block grew, it marks the new area as undefined and defines a red
     zone past the end of the new block.

   - The V-bits of the overlap between the old and the new block are preserved.

   VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
   and before deallocation of the old block.

   In many cases, these three client requests will not be enough to get your
   allocator working well with Memcheck.  More specifically, if your allocator
   writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
   will be necessary to mark the memory as addressable just before the zeroing
   occurs, otherwise you'll get a lot of invalid write errors.  For example,
   you'll need to do this if your allocator recycles freed blocks, but it
   zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
   Alternatively, if your allocator reuses freed blocks for allocator-internal
   data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.

   Really, what's happening is a blurring of the lines between the client
   program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
   memory should be considered unaddressable to the client program, but the
   allocator knows more than the rest of the client program and so may be able
   to safely access it.  Extra client requests are necessary for Valgrind to
   understand the distinction between the allocator and the rest of the
   client program.

   Ignored if addr == 0.
*/
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)          \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK,       \
                                    addr, sizeB, rzB, is_zeroed, 0)
/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,    \
                                    addr, oldSizeB, newSizeB, rzB, 0)
/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,         \
                                    addr, rzB, 0, 0, 0)
/* Create a memory pool. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,   \
                                    pool, rzB, is_zeroed, 0, 0)
/* Create a memory pool with some flags specifying extended behaviour.
   When flags is zero, the behaviour is identical to VALGRIND_CREATE_MEMPOOL.

   The flag VALGRIND_MEMPOOL_METAPOOL specifies that the pieces of memory
   associated with the pool using VALGRIND_MEMPOOL_ALLOC will be used
   by the application as superblocks to dole out MALLOC_LIKE blocks using
   VALGRIND_MALLOCLIKE_BLOCK.  In other words, a meta pool is a "2 levels"
   pool : first level is the blocks described by VALGRIND_MEMPOOL_ALLOC.
   The second level blocks are described using VALGRIND_MALLOCLIKE_BLOCK.
   Note that the association between the pool and the second level blocks
   is implicit : second level blocks will be located inside first level
   blocks.  It is necessary to use the VALGRIND_MEMPOOL_METAPOOL flag
   for such 2 levels pools, as otherwise valgrind will detect overlapping
   memory blocks, and will abort execution (e.g. during leak search).

   Such a meta pool can also be marked as an 'auto free' pool using the flag
   VALGRIND_MEMPOOL_AUTO_FREE, which must be OR-ed together with the
   VALGRIND_MEMPOOL_METAPOOL.  For an 'auto free' pool, VALGRIND_MEMPOOL_FREE
   will automatically free the second level blocks that are contained
   inside the first level block freed with VALGRIND_MEMPOOL_FREE.
   In other words, calling VALGRIND_MEMPOOL_FREE will cause implicit calls
   to VALGRIND_FREELIKE_BLOCK for all the second level blocks included
   in the first level block.
   Note: it is an error to use the VALGRIND_MEMPOOL_AUTO_FREE flag
   without the VALGRIND_MEMPOOL_METAPOOL flag.
*/
#define VALGRIND_MEMPOOL_AUTO_FREE  1
#define VALGRIND_MEMPOOL_METAPOOL   2
#define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags)        \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,          \
                                   pool, rzB, is_zeroed, flags, 0)
/* Destroy a memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,  \
                                    pool, 0, 0, 0, 0)
/* Associate a piece of memory with a memory pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,    \
                                    pool, addr, size, 0, 0)
/* Disassociate a piece of memory from a memory pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,     \
                                    pool, addr, 0, 0, 0)
/* Disassociate any pieces outside a particular range. */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,     \
                                    pool, addr, size, 0, 0)
/* Resize and/or move a piece associated with a memory pool. */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,     \
                                    poolA, poolB, 0, 0, 0)
/* Resize and/or move a piece associated with a memory pool. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,   \
                                    pool, addrA, addrB, size, 0)
/* Return 1 if a mempool exists, else 0. */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MEMPOOL_EXISTS,        \
                               pool, 0, 0, 0, 0)
/* Mark a piece of memory as being a stack.  Returns a stack id.
   start is the lowest addressable stack byte, end is the highest
   addressable stack byte. */
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__STACK_REGISTER,        \
                               start, end, 0, 0, 0)
/* Unmark the piece of memory associated with a stack id as being a
   stack. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
                                    id, 0, 0, 0, 0)
/* Change the start and end address of the stack id.
   start is the new lowest addressable stack byte, end is the new highest
   addressable stack byte. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,     \
                                    id, start, end, 0, 0)
/* Load PDB debug info for Wine PE image_map. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                    fd, ptr, total_size, delta, 0)
/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,      \
                               addr, buf64, 0, 0, 0)
/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled. */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)
/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING. */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)
/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise. */
#define VALGRIND_MONITOR_COMMAND(command)                               \
   VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND,  \
                                   command, 0, 0, 0, 0)
6639 #undef PLAT_x86_darwin
6640 #undef PLAT_amd64_darwin
6641 #undef PLAT_x86_win32
6642 #undef PLAT_amd64_win64
6643 #undef PLAT_x86_linux
6644 #undef PLAT_amd64_linux
6645 #undef PLAT_ppc32_linux
6646 #undef PLAT_ppc64be_linux
6647 #undef PLAT_ppc64le_linux
6648 #undef PLAT_arm_linux
6649 #undef PLAT_s390x_linux
6650 #undef PLAT_mips32_linux
6651 #undef PLAT_mips64_linux
6652 #undef PLAT_x86_solaris
6653 #undef PLAT_amd64_solaris
6655 #endif /* __VALGRIND_H */