drd: Add support for C11 thrd_create()
[valgrind.git] / include / valgrind.h.in
blob45f6522f348487bf3559527098d3bb25022d5e74
1 /* -*- c -*-
2 ----------------------------------------------------------------
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
10 ----------------------------------------------------------------
12 This file is part of Valgrind, a dynamic binary instrumentation
13 framework.
15 Copyright (C) 2000-2017 Julian Seward. All rights reserved.
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
19 are met:
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
34 permission.
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
48 ----------------------------------------------------------------
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
55 ----------------------------------------------------------------
59 /* This file is for inclusion into client (your!) code.
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on valgrind, each client request
67 consumes very few (eg. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
73 #ifndef __VALGRIND_H
74 #define __VALGRIND_H
/* ------------------------------------------------------------------ */
/* VERSION NUMBER OF VALGRIND                                         */
/* ------------------------------------------------------------------ */

/* Specify Valgrind's version number, so that user code can
   conditionally compile based on our version number.  Note that these
   were introduced at version 3.6 and so do not exist in version 3.5
   or earlier.  The recommended way to use them to check for "version
   X.Y or later" is (eg)

   #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__)   \
       && (__VALGRIND_MAJOR__ > 3                                   \
           || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
/* @VG_VER_MAJOR@ / @VG_VER_MINOR@ are template placeholders: this is
   valgrind.h.in, substituted at configure time. */
#define __VALGRIND_MAJOR__    @VG_VER_MAJOR@
#define __VALGRIND_MINOR__    @VG_VER_MINOR@
95 #include <stdarg.h>
97 /* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead
99 use "__asm__"). */
/* Derive some tags indicating what the target platform is.  Note
   that in this file we're using the compiler's CPP symbols for
   identifying architectures, which are different to the ones we use
   within the rest of Valgrind.  Note, __powerpc__ is active for both
   32 and 64-bit PPC, whereas __powerpc64__ is only active for the
   latter (on Linux, that is).

   Misc note: how to find out what's predefined in gcc by default:
   gcc -Wp,-dM somefile.c
*/

/* Start from a clean slate: at most one PLAT_* tag is defined below. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_freebsd
#undef PLAT_amd64_freebsd
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_nanomips_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris


#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif defined(__FreeBSD__) && defined(__i386__)
#  define PLAT_x86_freebsd 1
#elif defined(__FreeBSD__) && defined(__amd64__)
#  define PLAT_amd64_freebsd 1
#elif (defined(__MINGW32__) && defined(__i386__)) \
      || defined(__CYGWIN32__)                    \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif (defined(__MINGW32__) && defined(__x86_64__)) \
      || (defined(_WIN32) && defined(_M_X64))
/* __MINGW32__ and _WIN32 are defined in 64 bit mode as well. */
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
/* Exclude the x32 ABI (64-bit insns, 32-bit pointers). */
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
/* Big Endian uses ELF version 1 */
#  define PLAT_ppc64be_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
/* Little Endian uses ELF version 2 */
#  define PLAT_ppc64le_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==32)
#  define PLAT_mips32_linux 1
#elif defined(__linux__) && defined(__nanomips__)
#  define PLAT_nanomips_linux 1
#elif defined(__sun) && defined(__i386__)
#  define PLAT_x86_solaris 1
#elif defined(__sun) && defined(__x86_64__)
#  define PLAT_amd64_solaris 1
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms.  */
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif
185 /* ------------------------------------------------------------------ */
186 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
187 /* in here of use to end-users -- skip to the next section. */
188 /* ------------------------------------------------------------------ */
/*
 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
 * request. Accepts both pointers and integers as arguments.
 *
 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
 * client request that does not return a value.
 *
 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
 * client request and whose value equals the client request result.  Accepts
 * both pointers and integers as arguments.  Note that such calls are not
 * necessarily pure functions -- they may have side effects.
 */

/* Statement form: store the request result into the caller's lvalue. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* Statement form that discards the result (default value 0). */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                           _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
216 #if defined(NVALGRIND)
218 /* Define NVALGRIND to completely remove the Valgrind magic sequence
219 from the compiled code (analogous to NDEBUG's effects on
220 assert()) */
221 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
222 _zzq_default, _zzq_request, \
223 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
224 (_zzq_default)
226 #else /* ! NVALGRIND */
228 /* The following defines the magic code sequences which the JITter
229 spots and handles magically. Don't look too closely at them as
230 they will rot your brain.
232 The assembly code sequences for all architectures is in this one
233 file. This is because this file must be stand-alone, and we don't
234 want to have multiple files.
236 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
237 value gets put in the return slot, so that everything works when
238 this is executed not under Valgrind. Args are passed in a memory
239 block, and so there's no intrinsic limit to the number that could
240 be passed, but it's currently five.
242 The macro args are:
243 _zzq_rlval result lvalue
244 _zzq_default default value (result returned when running on real CPU)
245 _zzq_request request code
246 _zzq_arg1..5 request params
248 The other two macros are used to support function wrapping, and are
249 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
250 guest's NRADDR pseudo-register and whatever other information is
251 needed to safely run the call original from the wrapper: on
252 ppc64-linux, the R2 value at the divert point is also needed. This
253 information is abstracted into a user-visible type, OrigFn.
255 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
256 guest, but guarantees that the branch instruction will not be
257 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
258 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
259 complete inline asm, since it needs to be combined with more magic
260 inline asm stuff to be useful.
/* ----------------- x86-{linux,darwin,solaris} ---------------- */

#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  (defined(PLAT_x86_win32) && defined(__GNUC__))     \
    ||  defined(PLAT_x86_solaris) || defined(PLAT_x86_freebsd)

typedef
   struct { 
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates totalling 64 bits leave %edi unchanged, so this is a
   no-op on a real CPU; Valgrind's translator recognises the sequence
   as a marker for the instruction that follows it. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3,  %%edi ; roll $13, %%edi\n\t"      \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgl %%edi,%%edi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
          || PLAT_x86_solaris */
/* ------------------------- x86-Win32 ------------------------- */

#if defined(PLAT_x86_win32) && !defined(__GNUC__)

typedef
   struct { 
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* MSVC inline-assembler spelling of the same no-op rotate marker
   used by the GCC x86 variant above. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
    __asm rol edi, 3  __asm rol edi, 13                           \
    __asm rol edi, 29 __asm rol edi, 19

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),    \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),        \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),           \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

/* Out-of-line helper: MSVC's __asm blocks cannot appear inside a
   statement expression, so the request is issued from a function. */
static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned int __addr;                               \
      __asm { __SPECIAL_INSTRUCTION_PREAMBLE                      \
              /* %EAX = guest_NRADDR */                           \
              __asm xchg ecx,ecx                                  \
              __asm mov __addr, eax                               \
      }                                                           \
      _zzq_orig->nraddr = __addr;                                 \
    }

#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE                        \
            __asm xchg edi,edi                                    \
    }                                                             \
 } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */
/* ----------------- amd64-{linux,darwin,solaris} --------------- */

#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin) \
    ||  defined(PLAT_amd64_solaris)                           \
    ||  defined(PLAT_amd64_freebsd)                           \
    ||  (defined(PLAT_amd64_win64) && defined(__GNUC__))

typedef
   struct { 
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates totalling 128 bits leave %rdi unchanged: a no-op on a
   real CPU, a marker for Valgrind's translator. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3,  %%rdi ; rolq $13, %%rdi\n\t"      \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long int _zzq_args[6];                   \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RDX = client_request ( %RAX ) */         \
                     "xchgq %%rbx,%%rbx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RAX = guest_NRADDR */                    \
                     "xchgq %%rcx,%%rcx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgq %%rdi,%%rdi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
/* ------------------------- amd64-Win64 ------------------------- */

#if defined(PLAT_amd64_win64) && !defined(__GNUC__)

/* No MSVC inline-asm support on 64-bit Windows; only GCC-compatible
   compilers are handled (by the amd64 section above). */
#error Unsupported compiler.

#endif /* PLAT_amd64_win64 */
/* ------------------------ ppc32-linux ------------------------ */

#if defined(PLAT_ppc32_linux)

typedef
   struct { 
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotate-left-word-immediate on r0 by 3+13+29+19 == 64 bits total:
   a no-op on a real CPU, a marker for Valgrind's translator. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                    "rlwinm 0,0,3,0,31  ; rlwinm 0,0,13,0,31\n\t" \
                    "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({         unsigned int  _zzq_args[6];                          \
             unsigned int  _zzq_result;                           \
             unsigned int* _zzq_ptr;                              \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc32_linux */
/* ------------------------ ppc64-linux ------------------------ */

#if defined(PLAT_ppc64be_linux)

typedef
   struct { 
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;  /* what tocptr do we need? */
   }
   OrigFn;

/* Rotate r0 left by 3+13+61+51 == 128 bits total: a no-op on a real
   CPU, a marker for Valgrind's translator. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3  ; rotldi 0,0,13\n\t"          \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({         unsigned long int  _zzq_args[6];                     \
             unsigned long int  _zzq_result;                      \
             unsigned long int* _zzq_ptr;                         \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

/* Also captures R2 (the TOC pointer) -- needed to call the original
   function safely from a wrapper on ppc64 ELFv1. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64be_linux */
#if defined(PLAT_ppc64le_linux)

typedef
   struct { 
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

/* Rotate r0 left by 3+13+61+51 == 128 bits total: a no-op on a real
   CPU, a marker for Valgrind's translator. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3  ; rotldi 0,0,13\n\t"          \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({         unsigned long int  _zzq_args[6];                     \
             unsigned long int  _zzq_result;                      \
             unsigned long int* _zzq_ptr;                         \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

/* Also captures R2 -- needed to call the original function safely
   from a wrapper on ppc64 ELFv2. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* ELFv2 uses R12 (not R11) as the function entry register. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R12 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64le_linux */
/* ------------------------- arm-linux ------------------------- */

#if defined(PLAT_arm_linux)

typedef
   struct { 
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotate r12 by 3+13+29+19 == 64 bits total: a no-op on a real CPU,
   a marker for Valgrind's translator. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3  ; mov r12, r12, ror #13 \n\t"  \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int  _zzq_args[6];                          \
    volatile unsigned int  _zzq_result;                           \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = guest_NRADDR */                      \
                     "orr r11, r11, r11\n\t"                      \
                     "mov %0, r3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr r9, r9, r9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm_linux */
/* ------------------------ arm64-linux ------------------------- */

#if defined(PLAT_arm64_linux)

typedef
   struct { 
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotate x12 by 3+13+51+61 == 128 bits total: a no-op on a real CPU,
   a marker for Valgrind's translator. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "ror x12, x12, #3  ;  ror x12, x12, #13 \n\t"         \
            "ror x12, x12, #51 ;  ror x12, x12, #61 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned long int  _zzq_args[6];                     \
    volatile unsigned long int  _zzq_result;                      \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile("mov x3, %1\n\t" /*default*/                 \
                     "mov x4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = client_request ( X4 ) */             \
                     "orr x10, x10, x10\n\t"                      \
                     "mov %0, x3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" ((unsigned long int)(_zzq_default)),   \
                       "r" (&_zzq_args[0])                        \
                     : "cc","memory", "x3", "x4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = guest_NRADDR */                      \
                     "orr x11, x11, x11\n\t"                      \
                     "mov %0, x3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "x3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr x9, x9, x9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm64_linux */
/* ------------------------ s390x-linux ------------------------ */

#if defined(PLAT_s390x_linux)

typedef
  struct {
     unsigned long int nraddr; /* where's the code? */
  }
  OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code. This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).  Each "lr r,r" is a register
 * self-move, i.e. a no-op on a real CPU.
 */
#define __SPECIAL_INSTRUCTION_PREAMBLE                           \
                     "lr 15,15\n\t"                              \
                     "lr 1,1\n\t"                                \
                     "lr 2,2\n\t"                                \
                     "lr 3,3\n\t"

/* The no-op that follows the preamble selects the operation. */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                         \
       _zzq_default, _zzq_request,                               \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                  \
 ({volatile unsigned long int _zzq_args[6];                      \
   volatile unsigned long int _zzq_result;                       \
   _zzq_args[0] = (unsigned long int)(_zzq_request);             \
   _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
   _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
   _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
   _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
   _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
   __asm__ volatile(/* r2 = args */                              \
                    "lgr 2,%1\n\t"                               \
                    /* r3 = default */                           \
                    "lgr 3,%2\n\t"                               \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CLIENT_REQUEST_CODE                        \
                    /* results = r3 */                           \
                    "lgr %0, 3\n\t"                              \
                    : "=d" (_zzq_result)                         \
                    : "a" (&_zzq_args[0]),                       \
                      "0" ((unsigned long int)_zzq_default)      \
                    : "cc", "2", "3", "memory"                   \
                   );                                            \
   _zzq_result;                                                  \
 })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                      \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
   volatile unsigned long int __addr;                            \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                    __GET_NR_CONTEXT_CODE                        \
                    "lgr %0, 3\n\t"                              \
                    : "=a" (__addr)                              \
                    :                                            \
                    : "cc", "3", "memory"                        \
                   );                                            \
   _zzq_orig->nraddr = __addr;                                   \
 }

#define VALGRIND_CALL_NOREDIR_R1                                 \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     __VEX_INJECT_IR_CODE);                      \
 } while (0)

#endif /* PLAT_s390x_linux */
/* ------------------------- mips32-linux ---------------- */

#if defined(PLAT_mips32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* .word 0x342
 * .word 0x742
 * .word 0xC2
 * .word 0x4C2*/
/* Shifts of $zero are architectural no-ops; the sequence is the
   marker recognised by Valgrind's translator. */
#define __SPECIAL_INSTRUCTION_PREAMBLE          \
                     "srl $0, $0, 13\n\t"       \
                     "srl $0, $0, 29\n\t"       \
                     "srl $0, $0, 3\n\t"        \
                     "srl $0, $0, 19\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("move $11, %1\n\t" /*default*/               \
                     "move $12, %2\n\t" /*ptr*/                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* T3 = client_request ( T4 ) */             \
                     "or $13, $13, $13\n\t"                       \
                     "move %0, $11\n\t"     /*result*/            \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "$11", "$12", "memory");                   \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %t9 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11"                                      \
                     );                                           \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%t9 */                      \
                     "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or $11, $11, $11\n\t"                       \
                    );                                            \
 } while (0)


#endif /* PLAT_mips32_linux */
/* ------------------------- mips64-linux ---------------- */

#if defined(PLAT_mips64_linux)

/* Context captured by VALGRIND_GET_NR_CONTEXT: the address of the
   original (non-redirected) function. */
typedef
   struct {
      unsigned long nraddr; /* where's the code? */
   }
   OrigFn;

/* The magic preamble, as otherwise-no-op shifts of $zero:
   dsll $0,$0, 3
   dsll $0,$0, 13
   dsll $0,$0, 29
   dsll $0,$0, 19 */
#define __SPECIAL_INSTRUCTION_PREAMBLE                              \
     "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"                            \
     "dsll $0,$0,29 ; dsll $0,$0,19\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                            \
       _zzq_default, _zzq_request,                                  \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)       \
  __extension__                                                     \
  ({ volatile unsigned long int _zzq_args[6];                       \
    volatile unsigned long int _zzq_result;                         \
    _zzq_args[0] = (unsigned long int)(_zzq_request);               \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                  \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                  \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                  \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                  \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                  \
    __asm__ volatile("move $11, %1\n\t" /*default*/                 \
                     "move $12, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE                 \
                     /* $11 = client_request ( $12 ) */             \
                     "or $13, $13, $13\n\t"                         \
                     "move %0, $11\n\t" /*result*/                  \
                     : "=r" (_zzq_result)                           \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])      \
                     : "$11", "$12", "memory");                     \
    _zzq_result;                                                    \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                         \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                     \
    volatile unsigned long int __addr;                              \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                     /* $11 = guest_NRADDR */                       \
                     "or $14, $14, $14\n\t"                         \
                     "move %0, $11"     /*result*/                  \
                     : "=r" (__addr)                                \
                     :                                              \
                     : "$11");                                      \
    _zzq_orig->nraddr = __addr;                                     \
  }

#define VALGRIND_CALL_NOREDIR_T9                                    \
                        __SPECIAL_INSTRUCTION_PREAMBLE              \
                        /* call-noredir $25 */                      \
                        "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                    \
 do {                                                               \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                     "or $11, $11, $11\n\t"                         \
                    );                                              \
 } while (0)

#endif /* PLAT_mips64_linux */
#if defined(PLAT_nanomips_linux)

/* Context captured by VALGRIND_GET_NR_CONTEXT: the address of the
   original (non-redirected) function. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;
/*
   8000 c04d  srl  zero, zero, 13
   8000 c05d  srl  zero, zero, 29
   8000 c043  srl  zero, zero, 3
   8000 c053  srl  zero, zero, 19
*/

#define __SPECIAL_INSTRUCTION_PREAMBLE "srl[32] $zero, $zero, 13 \n\t" \
                                       "srl[32] $zero, $zero, 29 \n\t" \
                                       "srl[32] $zero, $zero, 3  \n\t" \
                                       "srl[32] $zero, $zero, 19 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                            \
       _zzq_default, _zzq_request,                                  \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)       \
  __extension__                                                     \
  ({ volatile unsigned int _zzq_args[6];                            \
    volatile unsigned int _zzq_result;                              \
    _zzq_args[0] = (unsigned int)(_zzq_request);                    \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                       \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                       \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                       \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                       \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                       \
    __asm__ volatile("move $a7, %1\n\t" /* default */               \
                     "move $t0, %2\n\t" /* ptr */                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE                 \
                     /* $a7 = client_request( $t0 ) */              \
                     "or[32] $t0, $t0, $t0\n\t"                     \
                     "move %0, $a7\n\t" /* result */                \
                     : "=r" (_zzq_result)                           \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])      \
                     : "$a7", "$t0", "memory");                     \
    _zzq_result;                                                    \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                         \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                     \
    volatile unsigned long int __addr;                              \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                     /* $a7 = guest_NRADDR */                       \
                     "or[32] $t1, $t1, $t1\n\t"                     \
                     "move %0, $a7" /*result*/                      \
                     : "=r" (__addr)                                \
                     :                                              \
                     : "$a7");                                      \
    _zzq_orig->nraddr = __addr;                                     \
  }

#define VALGRIND_CALL_NOREDIR_T9                                    \
                        __SPECIAL_INSTRUCTION_PREAMBLE              \
                        /* call-noredir $25 */                      \
                        "or[32] $t2, $t2, $t2\n\t"

#define VALGRIND_VEX_INJECT_IR()                                    \
 do {                                                               \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                     "or[32] $t3, $t3, $t3\n\t"                     \
                    );                                              \
 } while (0)

#endif /* PLAT_nanomips_linux */
1129 /* Insert assembly code for other platforms here... */
1131 #endif /* NVALGRIND */
1134 /* ------------------------------------------------------------------ */
1135 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1136 /* ugly. It's the least-worst tradeoff I can think of. */
1137 /* ------------------------------------------------------------------ */
1139 /* This section defines magic (a.k.a appalling-hack) macros for doing
1140 guaranteed-no-redirection macros, so as to get from function
1141 wrappers to the functions they are wrapping. The whole point is to
1142 construct standard call sequences, but to do the call itself with a
1143 special no-redirect call pseudo-instruction that the JIT
1144 understands and handles specially. This section is long and
1145 repetitious, and I can't see a way to make it shorter.
   The naming scheme is as follows:

   CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}

   'W' stands for "word" and 'v' for "void".  Hence there are
   different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
   and for each, the possibility of returning a word-typed result, or
   no result.
*/
1157 /* Use these to write the name of your wrapper. NOTE: duplicates
1158 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
1159 the default behaviour equivalance class tag "0000" into the name.
1160 See pub_tool_redir.h for details -- normally you don't need to
1161 think about this, though. */
/* Use an extra level of macroisation so as to ensure the soname/fnname
   args are fully macro-expanded before pasting them together. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)

/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros.  The type of the argument _lval is OrigFn. */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)

/* Also provide end-user facilities for function replacement, rather
   than wrapping.  A replacement function differs from a wrapper in
   that it has no way to get hold of the original function being
   called, and hence no way to call onwards to it.  In a replacement
   function, VALGRIND_GET_ORIG_FN always returns zero. */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
/* Derivatives of the main macros below, for calling functions
   returning void.  Each simply discards the word-sized result into a
   local _junk variable. */

#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_v(_junk,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_W(_junk,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
/* ----------------- x86-{linux,darwin,solaris} ---------------- */

#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin) \
    ||  defined(PLAT_x86_solaris) || defined(PLAT_x86_freebsd)

/* These regs are trashed by the hidden call.  No need to mention eax
   as gcc can already see that, plus causes gcc to bomb. */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

#define VALGRIND_ALIGN_STACK               \
      "movl %%esp,%%edi\n\t"               \
      "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK             \
      "movl %%edi,%%esp\n\t"

/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
   long) == 4.  Each pads the stack (subl) so that after pushing the
   args the call site is 16-byte aligned. */

#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris
          || PLAT_x86_freebsd */
1658 /* ---------------- amd64-{linux,darwin,solaris} --------------- */
1660 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1661 || defined(PLAT_amd64_solaris) || defined(PLAT_amd64_freebsd)
/* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"
1669 /* This is all pretty complex. It's so as to make stack unwinding
1670 work reliably. See bug 243270. The basic problem is the sub and
1671 add of 128 of %rsp in all of the following macros. If gcc believes
1672 the CFA is in %rsp, then unwinding may fail, because what's at the
1673 CFA is not what gcc "expected" when it constructs the CFIs for the
1674 places where the macros are instantiated.
1676 But we can't just add a CFI annotation to increase the CFA offset
1677 by 128, to match the sub of 128 from %rsp, because we don't know
1678 whether gcc has chosen %rsp as the CFA at that point, or whether it
1679 has chosen some other register (eg, %rbp). In the latter case,
1680 adding a CFI annotation to change the CFA offset is simply wrong.
1682 So the solution is to get hold of the CFA using
1683 __builtin_dwarf_cfa(), put it in a known register, and add a
1684 CFI annotation to say what the register is. We choose %rbp for
1685 this (perhaps perversely), because:
1687 (1) %rbp is already subject to unwinding. If a new register was
1688 chosen then the unwinder would have to unwind it in all stack
1689 traces, which is expensive, and
1691 (2) %rbp is already subject to precise exception updates in the
1692 JIT. If a new register was chosen, we'd have to have precise
1693 exceptions for it too, which reduces performance of the
1694 generated code.
1696 However .. one extra complication. We can't just whack the result
1697 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1698 list of trashed registers at the end of the inline assembly
1699 fragments; gcc won't allow %rbp to appear in that list. Hence
1700 instead we need to stash %rbp in %r15 for the duration of the asm,
1701 and say that %r15 is trashed instead. gcc seems happy to go with
1702 that.
1704 Oh .. and this all needs to be conditionalised so that it is
1705 unchanged from before this commit, when compiled with older gccs
1706 that don't support __builtin_dwarf_cfa. Furthermore, since
1707 this header file is freestanding, it has to be independent of
1708 config.h, and so the following conditionalisation cannot depend on
1709 configure time checks.
1711 Although it's not clear from
1712 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1713 this expression excludes Darwin.
1714 .cfi directives in Darwin assembly appear to be completely
1715 different and I haven't investigated how they work.
   For even more entertainment value, note we have to use the
   completely undocumented __builtin_dwarf_cfa(), which appears to
   really compute the CFA, whereas __builtin_frame_address(0) claims
   to but actually doesn't.  See
   https://bugs.kde.org/show_bug.cgi?id=243270#c47
*/
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

#define VALGRIND_ALIGN_STACK               \
      "movq %%rsp,%%r14\n\t"               \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK             \
      "movq %%r14,%%rsp\n\t"
1751 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1752 long) == 8. */
1754 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1755 macros. In order not to trash the stack redzone, we need to drop
1756 %rsp by 128 before the hidden call, and restore afterwards. The
1757 nastyness is that it is only by luck that the stack still appears
1758 to be unwindable during the hidden call - since then the behaviour
1759 of any routine using this macro does not match what the CFI data
1760 says. Sigh.
1762 Why is this important? Imagine that a wrapper has a stack
1763 allocated local, and passes to the hidden call, a pointer to it.
1764 Because gcc does not know about the hidden call, it may allocate
1765 that local in the redzone. Unfortunately the hidden call may then
1766 trash it before it comes to use it. So we must step clear of the
1767 redzone, for the duration of the hidden call, to make it safe.
1769 Probably the same problem afflicts the other redzone-style ABIs too
1770 (ppc64-linux); but for those, the stack is
1771 self describing (none of this CFI nonsense) so at least messing
1772 with the stack pointer doesn't give a danger of non-unwindable
1773 stack. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1931 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1932 do { \
1933 volatile OrigFn _orig = (orig); \
1934 volatile unsigned long _argvec[7]; \
1935 volatile unsigned long _res; \
1936 _argvec[0] = (unsigned long)_orig.nraddr; \
1937 _argvec[1] = (unsigned long)(arg1); \
1938 _argvec[2] = (unsigned long)(arg2); \
1939 _argvec[3] = (unsigned long)(arg3); \
1940 _argvec[4] = (unsigned long)(arg4); \
1941 _argvec[5] = (unsigned long)(arg5); \
1942 _argvec[6] = (unsigned long)(arg6); \
1943 __asm__ volatile( \
1944 VALGRIND_CFI_PROLOGUE \
1945 VALGRIND_ALIGN_STACK \
1946 "subq $128,%%rsp\n\t" \
1947 "movq 48(%%rax), %%r9\n\t" \
1948 "movq 40(%%rax), %%r8\n\t" \
1949 "movq 32(%%rax), %%rcx\n\t" \
1950 "movq 24(%%rax), %%rdx\n\t" \
1951 "movq 16(%%rax), %%rsi\n\t" \
1952 "movq 8(%%rax), %%rdi\n\t" \
1953 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1954 VALGRIND_CALL_NOREDIR_RAX \
1955 VALGRIND_RESTORE_STACK \
1956 VALGRIND_CFI_EPILOGUE \
1957 : /*out*/ "=a" (_res) \
1958 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1959 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1960 ); \
1961 lval = (__typeof__(lval)) _res; \
1962 } while (0)
1964 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1965 arg7) \
1966 do { \
1967 volatile OrigFn _orig = (orig); \
1968 volatile unsigned long _argvec[8]; \
1969 volatile unsigned long _res; \
1970 _argvec[0] = (unsigned long)_orig.nraddr; \
1971 _argvec[1] = (unsigned long)(arg1); \
1972 _argvec[2] = (unsigned long)(arg2); \
1973 _argvec[3] = (unsigned long)(arg3); \
1974 _argvec[4] = (unsigned long)(arg4); \
1975 _argvec[5] = (unsigned long)(arg5); \
1976 _argvec[6] = (unsigned long)(arg6); \
1977 _argvec[7] = (unsigned long)(arg7); \
1978 __asm__ volatile( \
1979 VALGRIND_CFI_PROLOGUE \
1980 VALGRIND_ALIGN_STACK \
1981 "subq $136,%%rsp\n\t" \
1982 "pushq 56(%%rax)\n\t" \
1983 "movq 48(%%rax), %%r9\n\t" \
1984 "movq 40(%%rax), %%r8\n\t" \
1985 "movq 32(%%rax), %%rcx\n\t" \
1986 "movq 24(%%rax), %%rdx\n\t" \
1987 "movq 16(%%rax), %%rsi\n\t" \
1988 "movq 8(%%rax), %%rdi\n\t" \
1989 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1990 VALGRIND_CALL_NOREDIR_RAX \
1991 VALGRIND_RESTORE_STACK \
1992 VALGRIND_CFI_EPILOGUE \
1993 : /*out*/ "=a" (_res) \
1994 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1995 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1996 ); \
1997 lval = (__typeof__(lval)) _res; \
1998 } while (0)
2000 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2001 arg7,arg8) \
2002 do { \
2003 volatile OrigFn _orig = (orig); \
2004 volatile unsigned long _argvec[9]; \
2005 volatile unsigned long _res; \
2006 _argvec[0] = (unsigned long)_orig.nraddr; \
2007 _argvec[1] = (unsigned long)(arg1); \
2008 _argvec[2] = (unsigned long)(arg2); \
2009 _argvec[3] = (unsigned long)(arg3); \
2010 _argvec[4] = (unsigned long)(arg4); \
2011 _argvec[5] = (unsigned long)(arg5); \
2012 _argvec[6] = (unsigned long)(arg6); \
2013 _argvec[7] = (unsigned long)(arg7); \
2014 _argvec[8] = (unsigned long)(arg8); \
2015 __asm__ volatile( \
2016 VALGRIND_CFI_PROLOGUE \
2017 VALGRIND_ALIGN_STACK \
2018 "subq $128,%%rsp\n\t" \
2019 "pushq 64(%%rax)\n\t" \
2020 "pushq 56(%%rax)\n\t" \
2021 "movq 48(%%rax), %%r9\n\t" \
2022 "movq 40(%%rax), %%r8\n\t" \
2023 "movq 32(%%rax), %%rcx\n\t" \
2024 "movq 24(%%rax), %%rdx\n\t" \
2025 "movq 16(%%rax), %%rsi\n\t" \
2026 "movq 8(%%rax), %%rdi\n\t" \
2027 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2028 VALGRIND_CALL_NOREDIR_RAX \
2029 VALGRIND_RESTORE_STACK \
2030 VALGRIND_CFI_EPILOGUE \
2031 : /*out*/ "=a" (_res) \
2032 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2033 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2034 ); \
2035 lval = (__typeof__(lval)) _res; \
2036 } while (0)
2038 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2039 arg7,arg8,arg9) \
2040 do { \
2041 volatile OrigFn _orig = (orig); \
2042 volatile unsigned long _argvec[10]; \
2043 volatile unsigned long _res; \
2044 _argvec[0] = (unsigned long)_orig.nraddr; \
2045 _argvec[1] = (unsigned long)(arg1); \
2046 _argvec[2] = (unsigned long)(arg2); \
2047 _argvec[3] = (unsigned long)(arg3); \
2048 _argvec[4] = (unsigned long)(arg4); \
2049 _argvec[5] = (unsigned long)(arg5); \
2050 _argvec[6] = (unsigned long)(arg6); \
2051 _argvec[7] = (unsigned long)(arg7); \
2052 _argvec[8] = (unsigned long)(arg8); \
2053 _argvec[9] = (unsigned long)(arg9); \
2054 __asm__ volatile( \
2055 VALGRIND_CFI_PROLOGUE \
2056 VALGRIND_ALIGN_STACK \
2057 "subq $136,%%rsp\n\t" \
2058 "pushq 72(%%rax)\n\t" \
2059 "pushq 64(%%rax)\n\t" \
2060 "pushq 56(%%rax)\n\t" \
2061 "movq 48(%%rax), %%r9\n\t" \
2062 "movq 40(%%rax), %%r8\n\t" \
2063 "movq 32(%%rax), %%rcx\n\t" \
2064 "movq 24(%%rax), %%rdx\n\t" \
2065 "movq 16(%%rax), %%rsi\n\t" \
2066 "movq 8(%%rax), %%rdi\n\t" \
2067 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2068 VALGRIND_CALL_NOREDIR_RAX \
2069 VALGRIND_RESTORE_STACK \
2070 VALGRIND_CFI_EPILOGUE \
2071 : /*out*/ "=a" (_res) \
2072 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2073 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2074 ); \
2075 lval = (__typeof__(lval)) _res; \
2076 } while (0)
2078 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2079 arg7,arg8,arg9,arg10) \
2080 do { \
2081 volatile OrigFn _orig = (orig); \
2082 volatile unsigned long _argvec[11]; \
2083 volatile unsigned long _res; \
2084 _argvec[0] = (unsigned long)_orig.nraddr; \
2085 _argvec[1] = (unsigned long)(arg1); \
2086 _argvec[2] = (unsigned long)(arg2); \
2087 _argvec[3] = (unsigned long)(arg3); \
2088 _argvec[4] = (unsigned long)(arg4); \
2089 _argvec[5] = (unsigned long)(arg5); \
2090 _argvec[6] = (unsigned long)(arg6); \
2091 _argvec[7] = (unsigned long)(arg7); \
2092 _argvec[8] = (unsigned long)(arg8); \
2093 _argvec[9] = (unsigned long)(arg9); \
2094 _argvec[10] = (unsigned long)(arg10); \
2095 __asm__ volatile( \
2096 VALGRIND_CFI_PROLOGUE \
2097 VALGRIND_ALIGN_STACK \
2098 "subq $128,%%rsp\n\t" \
2099 "pushq 80(%%rax)\n\t" \
2100 "pushq 72(%%rax)\n\t" \
2101 "pushq 64(%%rax)\n\t" \
2102 "pushq 56(%%rax)\n\t" \
2103 "movq 48(%%rax), %%r9\n\t" \
2104 "movq 40(%%rax), %%r8\n\t" \
2105 "movq 32(%%rax), %%rcx\n\t" \
2106 "movq 24(%%rax), %%rdx\n\t" \
2107 "movq 16(%%rax), %%rsi\n\t" \
2108 "movq 8(%%rax), %%rdi\n\t" \
2109 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2110 VALGRIND_CALL_NOREDIR_RAX \
2111 VALGRIND_RESTORE_STACK \
2112 VALGRIND_CFI_EPILOGUE \
2113 : /*out*/ "=a" (_res) \
2114 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2115 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2116 ); \
2117 lval = (__typeof__(lval)) _res; \
2118 } while (0)
2120 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2121 arg7,arg8,arg9,arg10,arg11) \
2122 do { \
2123 volatile OrigFn _orig = (orig); \
2124 volatile unsigned long _argvec[12]; \
2125 volatile unsigned long _res; \
2126 _argvec[0] = (unsigned long)_orig.nraddr; \
2127 _argvec[1] = (unsigned long)(arg1); \
2128 _argvec[2] = (unsigned long)(arg2); \
2129 _argvec[3] = (unsigned long)(arg3); \
2130 _argvec[4] = (unsigned long)(arg4); \
2131 _argvec[5] = (unsigned long)(arg5); \
2132 _argvec[6] = (unsigned long)(arg6); \
2133 _argvec[7] = (unsigned long)(arg7); \
2134 _argvec[8] = (unsigned long)(arg8); \
2135 _argvec[9] = (unsigned long)(arg9); \
2136 _argvec[10] = (unsigned long)(arg10); \
2137 _argvec[11] = (unsigned long)(arg11); \
2138 __asm__ volatile( \
2139 VALGRIND_CFI_PROLOGUE \
2140 VALGRIND_ALIGN_STACK \
2141 "subq $136,%%rsp\n\t" \
2142 "pushq 88(%%rax)\n\t" \
2143 "pushq 80(%%rax)\n\t" \
2144 "pushq 72(%%rax)\n\t" \
2145 "pushq 64(%%rax)\n\t" \
2146 "pushq 56(%%rax)\n\t" \
2147 "movq 48(%%rax), %%r9\n\t" \
2148 "movq 40(%%rax), %%r8\n\t" \
2149 "movq 32(%%rax), %%rcx\n\t" \
2150 "movq 24(%%rax), %%rdx\n\t" \
2151 "movq 16(%%rax), %%rsi\n\t" \
2152 "movq 8(%%rax), %%rdi\n\t" \
2153 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2154 VALGRIND_CALL_NOREDIR_RAX \
2155 VALGRIND_RESTORE_STACK \
2156 VALGRIND_CFI_EPILOGUE \
2157 : /*out*/ "=a" (_res) \
2158 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2159 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2160 ); \
2161 lval = (__typeof__(lval)) _res; \
2162 } while (0)
2164 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2165 arg7,arg8,arg9,arg10,arg11,arg12) \
2166 do { \
2167 volatile OrigFn _orig = (orig); \
2168 volatile unsigned long _argvec[13]; \
2169 volatile unsigned long _res; \
2170 _argvec[0] = (unsigned long)_orig.nraddr; \
2171 _argvec[1] = (unsigned long)(arg1); \
2172 _argvec[2] = (unsigned long)(arg2); \
2173 _argvec[3] = (unsigned long)(arg3); \
2174 _argvec[4] = (unsigned long)(arg4); \
2175 _argvec[5] = (unsigned long)(arg5); \
2176 _argvec[6] = (unsigned long)(arg6); \
2177 _argvec[7] = (unsigned long)(arg7); \
2178 _argvec[8] = (unsigned long)(arg8); \
2179 _argvec[9] = (unsigned long)(arg9); \
2180 _argvec[10] = (unsigned long)(arg10); \
2181 _argvec[11] = (unsigned long)(arg11); \
2182 _argvec[12] = (unsigned long)(arg12); \
2183 __asm__ volatile( \
2184 VALGRIND_CFI_PROLOGUE \
2185 VALGRIND_ALIGN_STACK \
2186 "subq $128,%%rsp\n\t" \
2187 "pushq 96(%%rax)\n\t" \
2188 "pushq 88(%%rax)\n\t" \
2189 "pushq 80(%%rax)\n\t" \
2190 "pushq 72(%%rax)\n\t" \
2191 "pushq 64(%%rax)\n\t" \
2192 "pushq 56(%%rax)\n\t" \
2193 "movq 48(%%rax), %%r9\n\t" \
2194 "movq 40(%%rax), %%r8\n\t" \
2195 "movq 32(%%rax), %%rcx\n\t" \
2196 "movq 24(%%rax), %%rdx\n\t" \
2197 "movq 16(%%rax), %%rsi\n\t" \
2198 "movq 8(%%rax), %%rdi\n\t" \
2199 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2200 VALGRIND_CALL_NOREDIR_RAX \
2201 VALGRIND_RESTORE_STACK \
2202 VALGRIND_CFI_EPILOGUE \
2203 : /*out*/ "=a" (_res) \
2204 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2205 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2206 ); \
2207 lval = (__typeof__(lval)) _res; \
2208 } while (0)
2210 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
2212 /* ------------------------ ppc32-linux ------------------------ */
2214 #if defined(PLAT_ppc32_linux)
2216 /* This is useful for finding out about the on-stack stuff:
2218    extern int f9  ( int,int,int,int,int,int,int,int,int );
2219    extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2220    extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2221    extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2223    int g9 ( void ) {
2224       return f9(11,22,33,44,55,66,77,88,99);
2226    int g10 ( void ) {
2227       return f10(11,22,33,44,55,66,77,88,99,110);
2229    int g11 ( void ) {
2230       return f11(11,22,33,44,55,66,77,88,99,110,121);
2232    int g12 ( void ) {
2233       return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2237 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2239 /* These regs are trashed by the hidden call. */
2240 #define __CALLER_SAVED_REGS                                       \
2241    "lr", "ctr", "xer",                                            \
2242    "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
2243    "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
2244    "r11", "r12", "r13"
2246 /* Macros to save and align the stack before making a function
2247    call and restore it afterwards as gcc may not keep the stack
2248    pointer aligned if it doesn't realise calls are being made
2249    to other functions. */
/* Save r1 (stack pointer) in r28, then clear the low 4 bits of r1
   (rlwinm mask 0..27) to force 16-byte alignment; restore from r28. */
2251 #define VALGRIND_ALIGN_STACK               \
2252    "mr 28,1\n\t"                           \
2253    "rlwinm 1,1,0,0,27\n\t"
2254 #define VALGRIND_RESTORE_STACK             \
2255    "mr 1,28\n\t"
2257 /* These CALL_FN_ macros assume that on ppc32-linux,
2258    sizeof(unsigned long) == 4. */
/* ppc32: call with no args.  argvec pointer goes to r11, target
   address is loaded from 0(r11); result is copied out of r3. */
2260 #define CALL_FN_W_v(lval, orig)                                   \
2261    do {                                                           \
2262       volatile OrigFn        _orig = (orig);                      \
2263       volatile unsigned long _argvec[1];                          \
2264       volatile unsigned long _res;                                \
2265       _argvec[0] = (unsigned long)_orig.nraddr;                   \
2266       __asm__ volatile(                                           \
2267          VALGRIND_ALIGN_STACK                                     \
2268          "mr 11,%1\n\t"                                           \
2269          "lwz 11,0(11)\n\t"  /* target->r11 */                    \
2270          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
2271          VALGRIND_RESTORE_STACK                                   \
2272          "mr %0,3"                                                \
2273          : /*out*/   "=r" (_res)                                  \
2274          : /*in*/    "r" (&_argvec[0])                            \
2275          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
2276       );                                                          \
2277       lval = (__typeof__(lval)) _res;                             \
2278    } while (0)
/* ppc32: 1 word arg -> r3; result <- r3. */
2280 #define CALL_FN_W_W(lval, orig, arg1)                             \
2281    do {                                                           \
2282       volatile OrigFn        _orig = (orig);                      \
2283       volatile unsigned long _argvec[2];                          \
2284       volatile unsigned long _res;                                \
2285       _argvec[0] = (unsigned long)_orig.nraddr;                   \
2286       _argvec[1] = (unsigned long)arg1;                           \
2287       __asm__ volatile(                                           \
2288          VALGRIND_ALIGN_STACK                                     \
2289          "mr 11,%1\n\t"                                           \
2290          "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
2291          "lwz 11,0(11)\n\t"  /* target->r11 */                    \
2292          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
2293          VALGRIND_RESTORE_STACK                                   \
2294          "mr %0,3"                                                \
2295          : /*out*/   "=r" (_res)                                  \
2296          : /*in*/    "r" (&_argvec[0])                            \
2297          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
2298       );                                                          \
2299       lval = (__typeof__(lval)) _res;                             \
2300    } while (0)
/* ppc32: 2 word args -> r3,r4. */
2302 #define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
2303    do {                                                           \
2304       volatile OrigFn        _orig = (orig);                      \
2305       volatile unsigned long _argvec[3];                          \
2306       volatile unsigned long _res;                                \
2307       _argvec[0] = (unsigned long)_orig.nraddr;                   \
2308       _argvec[1] = (unsigned long)arg1;                           \
2309       _argvec[2] = (unsigned long)arg2;                           \
2310       __asm__ volatile(                                           \
2311          VALGRIND_ALIGN_STACK                                     \
2312          "mr 11,%1\n\t"                                           \
2313          "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
2314          "lwz 4,8(11)\n\t"                                        \
2315          "lwz 11,0(11)\n\t"  /* target->r11 */                    \
2316          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
2317          VALGRIND_RESTORE_STACK                                   \
2318          "mr %0,3"                                                \
2319          : /*out*/   "=r" (_res)                                  \
2320          : /*in*/    "r" (&_argvec[0])                            \
2321          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
2322       );                                                          \
2323       lval = (__typeof__(lval)) _res;                             \
2324    } while (0)
/* ppc32: 3 word args -> r3,r4,r5. */
2326 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
2327    do {                                                           \
2328       volatile OrigFn        _orig = (orig);                      \
2329       volatile unsigned long _argvec[4];                          \
2330       volatile unsigned long _res;                                \
2331       _argvec[0] = (unsigned long)_orig.nraddr;                   \
2332       _argvec[1] = (unsigned long)arg1;                           \
2333       _argvec[2] = (unsigned long)arg2;                           \
2334       _argvec[3] = (unsigned long)arg3;                           \
2335       __asm__ volatile(                                           \
2336          VALGRIND_ALIGN_STACK                                     \
2337          "mr 11,%1\n\t"                                           \
2338          "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
2339          "lwz 4,8(11)\n\t"                                        \
2340          "lwz 5,12(11)\n\t"                                       \
2341          "lwz 11,0(11)\n\t"  /* target->r11 */                    \
2342          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
2343          VALGRIND_RESTORE_STACK                                   \
2344          "mr %0,3"                                                \
2345          : /*out*/   "=r" (_res)                                  \
2346          : /*in*/    "r" (&_argvec[0])                            \
2347          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
2348       );                                                          \
2349       lval = (__typeof__(lval)) _res;                             \
2350    } while (0)
/* ppc32: 4 word args -> r3..r6. */
2352 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
2353    do {                                                           \
2354       volatile OrigFn        _orig = (orig);                      \
2355       volatile unsigned long _argvec[5];                          \
2356       volatile unsigned long _res;                                \
2357       _argvec[0] = (unsigned long)_orig.nraddr;                   \
2358       _argvec[1] = (unsigned long)arg1;                           \
2359       _argvec[2] = (unsigned long)arg2;                           \
2360       _argvec[3] = (unsigned long)arg3;                           \
2361       _argvec[4] = (unsigned long)arg4;                           \
2362       __asm__ volatile(                                           \
2363          VALGRIND_ALIGN_STACK                                     \
2364          "mr 11,%1\n\t"                                           \
2365          "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
2366          "lwz 4,8(11)\n\t"                                        \
2367          "lwz 5,12(11)\n\t"                                       \
2368          "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
2369          "lwz 11,0(11)\n\t"  /* target->r11 */                    \
2370          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
2371          VALGRIND_RESTORE_STACK                                   \
2372          "mr %0,3"                                                \
2373          : /*out*/   "=r" (_res)                                  \
2374          : /*in*/    "r" (&_argvec[0])                            \
2375          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
2376       );                                                          \
2377       lval = (__typeof__(lval)) _res;                             \
2378    } while (0)
/* ppc32: 5 word args -> r3..r7. */
2380 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
2381    do {                                                           \
2382       volatile OrigFn        _orig = (orig);                      \
2383       volatile unsigned long _argvec[6];                          \
2384       volatile unsigned long _res;                                \
2385       _argvec[0] = (unsigned long)_orig.nraddr;                   \
2386       _argvec[1] = (unsigned long)arg1;                           \
2387       _argvec[2] = (unsigned long)arg2;                           \
2388       _argvec[3] = (unsigned long)arg3;                           \
2389       _argvec[4] = (unsigned long)arg4;                           \
2390       _argvec[5] = (unsigned long)arg5;                           \
2391       __asm__ volatile(                                           \
2392          VALGRIND_ALIGN_STACK                                     \
2393          "mr 11,%1\n\t"                                           \
2394          "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
2395          "lwz 4,8(11)\n\t"                                        \
2396          "lwz 5,12(11)\n\t"                                       \
2397          "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
2398          "lwz 7,20(11)\n\t"                                       \
2399          "lwz 11,0(11)\n\t"  /* target->r11 */                    \
2400          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
2401          VALGRIND_RESTORE_STACK                                   \
2402          "mr %0,3"                                                \
2403          : /*out*/   "=r" (_res)                                  \
2404          : /*in*/    "r" (&_argvec[0])                            \
2405          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
2406       );                                                          \
2407       lval = (__typeof__(lval)) _res;                             \
2408    } while (0)
/* ppc32: 6 word args -> r3..r8. */
2410 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
2411    do {                                                           \
2412       volatile OrigFn        _orig = (orig);                      \
2413       volatile unsigned long _argvec[7];                          \
2414       volatile unsigned long _res;                                \
2415       _argvec[0] = (unsigned long)_orig.nraddr;                   \
2416       _argvec[1] = (unsigned long)arg1;                           \
2417       _argvec[2] = (unsigned long)arg2;                           \
2418       _argvec[3] = (unsigned long)arg3;                           \
2419       _argvec[4] = (unsigned long)arg4;                           \
2420       _argvec[5] = (unsigned long)arg5;                           \
2421       _argvec[6] = (unsigned long)arg6;                           \
2422       __asm__ volatile(                                           \
2423          VALGRIND_ALIGN_STACK                                     \
2424          "mr 11,%1\n\t"                                           \
2425          "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
2426          "lwz 4,8(11)\n\t"                                        \
2427          "lwz 5,12(11)\n\t"                                       \
2428          "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
2429          "lwz 7,20(11)\n\t"                                       \
2430          "lwz 8,24(11)\n\t"                                       \
2431          "lwz 11,0(11)\n\t"  /* target->r11 */                    \
2432          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
2433          VALGRIND_RESTORE_STACK                                   \
2434          "mr %0,3"                                                \
2435          : /*out*/   "=r" (_res)                                  \
2436          : /*in*/    "r" (&_argvec[0])                            \
2437          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
2438       );                                                          \
2439       lval = (__typeof__(lval)) _res;                             \
2440    } while (0)
/* ppc32: 7 word args -> r3..r9. */
2442 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
2443                                  arg7)                            \
2444    do {                                                           \
2445       volatile OrigFn        _orig = (orig);                      \
2446       volatile unsigned long _argvec[8];                          \
2447       volatile unsigned long _res;                                \
2448       _argvec[0] = (unsigned long)_orig.nraddr;                   \
2449       _argvec[1] = (unsigned long)arg1;                           \
2450       _argvec[2] = (unsigned long)arg2;                           \
2451       _argvec[3] = (unsigned long)arg3;                           \
2452       _argvec[4] = (unsigned long)arg4;                           \
2453       _argvec[5] = (unsigned long)arg5;                           \
2454       _argvec[6] = (unsigned long)arg6;                           \
2455       _argvec[7] = (unsigned long)arg7;                           \
2456       __asm__ volatile(                                           \
2457          VALGRIND_ALIGN_STACK                                     \
2458          "mr 11,%1\n\t"                                           \
2459          "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
2460          "lwz 4,8(11)\n\t"                                        \
2461          "lwz 5,12(11)\n\t"                                       \
2462          "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
2463          "lwz 7,20(11)\n\t"                                       \
2464          "lwz 8,24(11)\n\t"                                       \
2465          "lwz 9,28(11)\n\t"                                       \
2466          "lwz 11,0(11)\n\t"  /* target->r11 */                    \
2467          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
2468          VALGRIND_RESTORE_STACK                                   \
2469          "mr %0,3"                                                \
2470          : /*out*/   "=r" (_res)                                  \
2471          : /*in*/    "r" (&_argvec[0])                            \
2472          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
2473       );                                                          \
2474       lval = (__typeof__(lval)) _res;                             \
2475    } while (0)
/* ppc32: 8 word args -> r3..r10 (all argument registers used). */
2477 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
2478                                  arg7,arg8)                       \
2479    do {                                                           \
2480       volatile OrigFn        _orig = (orig);                      \
2481       volatile unsigned long _argvec[9];                          \
2482       volatile unsigned long _res;                                \
2483       _argvec[0] = (unsigned long)_orig.nraddr;                   \
2484       _argvec[1] = (unsigned long)arg1;                           \
2485       _argvec[2] = (unsigned long)arg2;                           \
2486       _argvec[3] = (unsigned long)arg3;                           \
2487       _argvec[4] = (unsigned long)arg4;                           \
2488       _argvec[5] = (unsigned long)arg5;                           \
2489       _argvec[6] = (unsigned long)arg6;                           \
2490       _argvec[7] = (unsigned long)arg7;                           \
2491       _argvec[8] = (unsigned long)arg8;                           \
2492       __asm__ volatile(                                           \
2493          VALGRIND_ALIGN_STACK                                     \
2494          "mr 11,%1\n\t"                                           \
2495          "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
2496          "lwz 4,8(11)\n\t"                                        \
2497          "lwz 5,12(11)\n\t"                                       \
2498          "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
2499          "lwz 7,20(11)\n\t"                                       \
2500          "lwz 8,24(11)\n\t"                                       \
2501          "lwz 9,28(11)\n\t"                                       \
2502          "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
2503          "lwz 11,0(11)\n\t"  /* target->r11 */                    \
2504          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
2505          VALGRIND_RESTORE_STACK                                   \
2506          "mr %0,3"                                                \
2507          : /*out*/   "=r" (_res)                                  \
2508          : /*in*/    "r" (&_argvec[0])                            \
2509          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
2510       );                                                          \
2511       lval = (__typeof__(lval)) _res;                             \
2512    } while (0)
/* ppc32: 9 word args; arg9 is stored at 8(r1) after the stack drop,
   remaining args go in r3..r10.  r3 is reused as a scratch register
   for the stack store before the real arg1 load overwrites it. */
2514 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
2515                                  arg7,arg8,arg9)                  \
2516    do {                                                           \
2517       volatile OrigFn        _orig = (orig);                      \
2518       volatile unsigned long _argvec[10];                         \
2519       volatile unsigned long _res;                                \
2520       _argvec[0] = (unsigned long)_orig.nraddr;                   \
2521       _argvec[1] = (unsigned long)arg1;                           \
2522       _argvec[2] = (unsigned long)arg2;                           \
2523       _argvec[3] = (unsigned long)arg3;                           \
2524       _argvec[4] = (unsigned long)arg4;                           \
2525       _argvec[5] = (unsigned long)arg5;                           \
2526       _argvec[6] = (unsigned long)arg6;                           \
2527       _argvec[7] = (unsigned long)arg7;                           \
2528       _argvec[8] = (unsigned long)arg8;                           \
2529       _argvec[9] = (unsigned long)arg9;                           \
2530       __asm__ volatile(                                           \
2531          VALGRIND_ALIGN_STACK                                     \
2532          "mr 11,%1\n\t"                                           \
2533          "addi 1,1,-16\n\t"                                       \
2534          /* arg9 */                                               \
2535          "lwz 3,36(11)\n\t"                                       \
2536          "stw 3,8(1)\n\t"                                         \
2537          /* args1-8 */                                            \
2538          "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
2539          "lwz 4,8(11)\n\t"                                        \
2540          "lwz 5,12(11)\n\t"                                       \
2541          "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
2542          "lwz 7,20(11)\n\t"                                       \
2543          "lwz 8,24(11)\n\t"                                       \
2544          "lwz 9,28(11)\n\t"                                       \
2545          "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
2546          "lwz 11,0(11)\n\t"  /* target->r11 */                    \
2547          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
2548          VALGRIND_RESTORE_STACK                                   \
2549          "mr %0,3"                                                \
2550          : /*out*/   "=r" (_res)                                  \
2551          : /*in*/    "r" (&_argvec[0])                            \
2552          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
2553       );                                                          \
2554       lval = (__typeof__(lval)) _res;                             \
2555    } while (0)
/* ppc32: 10 word args; arg10 at 12(r1), arg9 at 8(r1), rest in r3..r10. */
2557 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
2558                                   arg7,arg8,arg9,arg10)           \
2559    do {                                                           \
2560       volatile OrigFn        _orig = (orig);                      \
2561       volatile unsigned long _argvec[11];                         \
2562       volatile unsigned long _res;                                \
2563       _argvec[0] = (unsigned long)_orig.nraddr;                   \
2564       _argvec[1] = (unsigned long)arg1;                           \
2565       _argvec[2] = (unsigned long)arg2;                           \
2566       _argvec[3] = (unsigned long)arg3;                           \
2567       _argvec[4] = (unsigned long)arg4;                           \
2568       _argvec[5] = (unsigned long)arg5;                           \
2569       _argvec[6] = (unsigned long)arg6;                           \
2570       _argvec[7] = (unsigned long)arg7;                           \
2571       _argvec[8] = (unsigned long)arg8;                           \
2572       _argvec[9] = (unsigned long)arg9;                           \
2573       _argvec[10] = (unsigned long)arg10;                         \
2574       __asm__ volatile(                                           \
2575          VALGRIND_ALIGN_STACK                                     \
2576          "mr 11,%1\n\t"                                           \
2577          "addi 1,1,-16\n\t"                                       \
2578          /* arg10 */                                              \
2579          "lwz 3,40(11)\n\t"                                       \
2580          "stw 3,12(1)\n\t"                                        \
2581          /* arg9 */                                               \
2582          "lwz 3,36(11)\n\t"                                       \
2583          "stw 3,8(1)\n\t"                                         \
2584          /* args1-8 */                                            \
2585          "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
2586          "lwz 4,8(11)\n\t"                                        \
2587          "lwz 5,12(11)\n\t"                                       \
2588          "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
2589          "lwz 7,20(11)\n\t"                                       \
2590          "lwz 8,24(11)\n\t"                                       \
2591          "lwz 9,28(11)\n\t"                                       \
2592          "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
2593          "lwz 11,0(11)\n\t"  /* target->r11 */                    \
2594          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
2595          VALGRIND_RESTORE_STACK                                   \
2596          "mr %0,3"                                                \
2597          : /*out*/   "=r" (_res)                                  \
2598          : /*in*/    "r" (&_argvec[0])                            \
2599          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
2600       );                                                          \
2601       lval = (__typeof__(lval)) _res;                             \
2602    } while (0)
/* ppc32: 11 word args; needs a 32-byte stack drop for 3 stack args
   (arg11 at 16(r1), arg10 at 12(r1), arg9 at 8(r1)). */
2604 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
2605                                   arg7,arg8,arg9,arg10,arg11)     \
2606    do {                                                           \
2607       volatile OrigFn        _orig = (orig);                      \
2608       volatile unsigned long _argvec[12];                         \
2609       volatile unsigned long _res;                                \
2610       _argvec[0] = (unsigned long)_orig.nraddr;                   \
2611       _argvec[1] = (unsigned long)arg1;                           \
2612       _argvec[2] = (unsigned long)arg2;                           \
2613       _argvec[3] = (unsigned long)arg3;                           \
2614       _argvec[4] = (unsigned long)arg4;                           \
2615       _argvec[5] = (unsigned long)arg5;                           \
2616       _argvec[6] = (unsigned long)arg6;                           \
2617       _argvec[7] = (unsigned long)arg7;                           \
2618       _argvec[8] = (unsigned long)arg8;                           \
2619       _argvec[9] = (unsigned long)arg9;                           \
2620       _argvec[10] = (unsigned long)arg10;                         \
2621       _argvec[11] = (unsigned long)arg11;                         \
2622       __asm__ volatile(                                           \
2623          VALGRIND_ALIGN_STACK                                     \
2624          "mr 11,%1\n\t"                                           \
2625          "addi 1,1,-32\n\t"                                       \
2626          /* arg11 */                                              \
2627          "lwz 3,44(11)\n\t"                                       \
2628          "stw 3,16(1)\n\t"                                        \
2629          /* arg10 */                                              \
2630          "lwz 3,40(11)\n\t"                                       \
2631          "stw 3,12(1)\n\t"                                        \
2632          /* arg9 */                                               \
2633          "lwz 3,36(11)\n\t"                                       \
2634          "stw 3,8(1)\n\t"                                         \
2635          /* args1-8 */                                            \
2636          "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
2637          "lwz 4,8(11)\n\t"                                        \
2638          "lwz 5,12(11)\n\t"                                       \
2639          "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
2640          "lwz 7,20(11)\n\t"                                       \
2641          "lwz 8,24(11)\n\t"                                       \
2642          "lwz 9,28(11)\n\t"                                       \
2643          "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
2644          "lwz 11,0(11)\n\t"  /* target->r11 */                    \
2645          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
2646          VALGRIND_RESTORE_STACK                                   \
2647          "mr %0,3"                                                \
2648          : /*out*/   "=r" (_res)                                  \
2649          : /*in*/    "r" (&_argvec[0])                            \
2650          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
2651       );                                                          \
2652       lval = (__typeof__(lval)) _res;                             \
2653    } while (0)
/* ppc32: 12 word args; 4 stack args at 20,16,12,8(r1), rest in r3..r10. */
2655 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
2656                                   arg7,arg8,arg9,arg10,arg11,arg12) \
2657    do {                                                           \
2658       volatile OrigFn        _orig = (orig);                      \
2659       volatile unsigned long _argvec[13];                         \
2660       volatile unsigned long _res;                                \
2661       _argvec[0] = (unsigned long)_orig.nraddr;                   \
2662       _argvec[1] = (unsigned long)arg1;                           \
2663       _argvec[2] = (unsigned long)arg2;                           \
2664       _argvec[3] = (unsigned long)arg3;                           \
2665       _argvec[4] = (unsigned long)arg4;                           \
2666       _argvec[5] = (unsigned long)arg5;                           \
2667       _argvec[6] = (unsigned long)arg6;                           \
2668       _argvec[7] = (unsigned long)arg7;                           \
2669       _argvec[8] = (unsigned long)arg8;                           \
2670       _argvec[9] = (unsigned long)arg9;                           \
2671       _argvec[10] = (unsigned long)arg10;                         \
2672       _argvec[11] = (unsigned long)arg11;                         \
2673       _argvec[12] = (unsigned long)arg12;                         \
2674       __asm__ volatile(                                           \
2675          VALGRIND_ALIGN_STACK                                     \
2676          "mr 11,%1\n\t"                                           \
2677          "addi 1,1,-32\n\t"                                       \
2678          /* arg12 */                                              \
2679          "lwz 3,48(11)\n\t"                                       \
2680          "stw 3,20(1)\n\t"                                        \
2681          /* arg11 */                                              \
2682          "lwz 3,44(11)\n\t"                                       \
2683          "stw 3,16(1)\n\t"                                        \
2684          /* arg10 */                                              \
2685          "lwz 3,40(11)\n\t"                                       \
2686          "stw 3,12(1)\n\t"                                        \
2687          /* arg9 */                                               \
2688          "lwz 3,36(11)\n\t"                                       \
2689          "stw 3,8(1)\n\t"                                         \
2690          /* args1-8 */                                            \
2691          "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
2692          "lwz 4,8(11)\n\t"                                        \
2693          "lwz 5,12(11)\n\t"                                       \
2694          "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
2695          "lwz 7,20(11)\n\t"                                       \
2696          "lwz 8,24(11)\n\t"                                       \
2697          "lwz 9,28(11)\n\t"                                       \
2698          "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
2699          "lwz 11,0(11)\n\t"  /* target->r11 */                    \
2700          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
2701          VALGRIND_RESTORE_STACK                                   \
2702          "mr %0,3"                                                \
2703          : /*out*/   "=r" (_res)                                  \
2704          : /*in*/    "r" (&_argvec[0])                            \
2705          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
2706       );                                                          \
2707       lval = (__typeof__(lval)) _res;                             \
2708    } while (0)
2710 #endif /* PLAT_ppc32_linux */
2712 /* ------------------------ ppc64-linux ------------------------ */
2714 #if defined(PLAT_ppc64be_linux)
2716 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2718 /* These regs are trashed by the hidden call. */
2719 #define __CALLER_SAVED_REGS                                       \
2720    "lr", "ctr", "xer",                                            \
2721    "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
2722    "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
2723    "r11", "r12", "r13"
2725 /* Macros to save and align the stack before making a function
2726    call and restore it afterwards as gcc may not keep the stack
2727    pointer aligned if it doesn't realise calls are being made
2728    to other functions. */
/* Save r1 in r28; rldicr with mask 0..59 clears the low 4 bits of r1,
   forcing 16-byte alignment (64-bit analogue of the ppc32 rlwinm). */
2730 #define VALGRIND_ALIGN_STACK               \
2731    "mr 28,1\n\t"                           \
2732    "rldicr 1,1,0,59\n\t"
2733 #define VALGRIND_RESTORE_STACK             \
2734    "mr 1,28\n\t"
2736 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2737    long) == 8. */
2739 #define CALL_FN_W_v(lval, orig) \
2740 do { \
2741 volatile OrigFn _orig = (orig); \
2742 volatile unsigned long _argvec[3+0]; \
2743 volatile unsigned long _res; \
2744 /* _argvec[0] holds current r2 across the call */ \
2745 _argvec[1] = (unsigned long)_orig.r2; \
2746 _argvec[2] = (unsigned long)_orig.nraddr; \
2747 __asm__ volatile( \
2748 VALGRIND_ALIGN_STACK \
2749 "mr 11,%1\n\t" \
2750 "std 2,-16(11)\n\t" /* save tocptr */ \
2751 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2752 "ld 11, 0(11)\n\t" /* target->r11 */ \
2753 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2754 "mr 11,%1\n\t" \
2755 "mr %0,3\n\t" \
2756 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2757 VALGRIND_RESTORE_STACK \
2758 : /*out*/ "=r" (_res) \
2759 : /*in*/ "r" (&_argvec[2]) \
2760 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2761 ); \
2762 lval = (__typeof__(lval)) _res; \
2763 } while (0)
2765 #define CALL_FN_W_W(lval, orig, arg1) \
2766 do { \
2767 volatile OrigFn _orig = (orig); \
2768 volatile unsigned long _argvec[3+1]; \
2769 volatile unsigned long _res; \
2770 /* _argvec[0] holds current r2 across the call */ \
2771 _argvec[1] = (unsigned long)_orig.r2; \
2772 _argvec[2] = (unsigned long)_orig.nraddr; \
2773 _argvec[2+1] = (unsigned long)arg1; \
2774 __asm__ volatile( \
2775 VALGRIND_ALIGN_STACK \
2776 "mr 11,%1\n\t" \
2777 "std 2,-16(11)\n\t" /* save tocptr */ \
2778 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2779 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2780 "ld 11, 0(11)\n\t" /* target->r11 */ \
2781 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2782 "mr 11,%1\n\t" \
2783 "mr %0,3\n\t" \
2784 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2785 VALGRIND_RESTORE_STACK \
2786 : /*out*/ "=r" (_res) \
2787 : /*in*/ "r" (&_argvec[2]) \
2788 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2789 ); \
2790 lval = (__typeof__(lval)) _res; \
2791 } while (0)
/* ppc64be: as CALL_FN_W_W but with two register arguments
   (arg1->r3, arg2->r4).  TOC save/swap/restore is identical. */
2793 #define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
2794    do {                                                          \
2795       volatile OrigFn        _orig = (orig);                     \
2796       volatile unsigned long _argvec[3+2];                       \
2797       volatile unsigned long _res;                               \
2798       /* _argvec[0] holds current r2 across the call */          \
2799       _argvec[1]   = (unsigned long)_orig.r2;                    \
2800       _argvec[2]   = (unsigned long)_orig.nraddr;                \
2801       _argvec[2+1] = (unsigned long)arg1;                        \
2802       _argvec[2+2] = (unsigned long)arg2;                        \
2803       __asm__ volatile(                                          \
2804          VALGRIND_ALIGN_STACK                                    \
2805          "mr 11,%1\n\t"                                          \
2806          "std 2,-16(11)\n\t"  /* save tocptr */                  \
2807          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */          \
2808          "ld   3, 8(11)\n\t"  /* arg1->r3 */                     \
2809          "ld   4, 16(11)\n\t" /* arg2->r4 */                     \
2810          "ld  11, 0(11)\n\t"  /* target->r11 */                  \
2811          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
2812          "mr 11,%1\n\t"                                          \
2813          "mr %0,3\n\t"                                           \
2814          "ld 2,-16(11)\n\t"   /* restore tocptr */               \
2815          VALGRIND_RESTORE_STACK                                  \
2816          : /*out*/   "=r" (_res)                                 \
2817          : /*in*/    "r" (&_argvec[2])                           \
2818          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
2819       );                                                         \
2820       lval = (__typeof__(lval)) _res;                            \
2821    } while (0)
/* ppc64be: three register arguments (arg1..arg3 -> r3..r5). */
2823 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
2824    do {                                                          \
2825       volatile OrigFn        _orig = (orig);                     \
2826       volatile unsigned long _argvec[3+3];                       \
2827       volatile unsigned long _res;                               \
2828       /* _argvec[0] holds current r2 across the call */          \
2829       _argvec[1]   = (unsigned long)_orig.r2;                    \
2830       _argvec[2]   = (unsigned long)_orig.nraddr;                \
2831       _argvec[2+1] = (unsigned long)arg1;                        \
2832       _argvec[2+2] = (unsigned long)arg2;                        \
2833       _argvec[2+3] = (unsigned long)arg3;                        \
2834       __asm__ volatile(                                          \
2835          VALGRIND_ALIGN_STACK                                    \
2836          "mr 11,%1\n\t"                                          \
2837          "std 2,-16(11)\n\t"  /* save tocptr */                  \
2838          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */          \
2839          "ld   3, 8(11)\n\t"  /* arg1->r3 */                     \
2840          "ld   4, 16(11)\n\t" /* arg2->r4 */                     \
2841          "ld   5, 24(11)\n\t" /* arg3->r5 */                     \
2842          "ld  11, 0(11)\n\t"  /* target->r11 */                  \
2843          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
2844          "mr 11,%1\n\t"                                          \
2845          "mr %0,3\n\t"                                           \
2846          "ld 2,-16(11)\n\t"   /* restore tocptr */               \
2847          VALGRIND_RESTORE_STACK                                  \
2848          : /*out*/   "=r" (_res)                                 \
2849          : /*in*/    "r" (&_argvec[2])                           \
2850          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
2851       );                                                         \
2852       lval = (__typeof__(lval)) _res;                            \
2853    } while (0)
/* ppc64be: four register arguments (arg1..arg4 -> r3..r6). */
2855 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
2856    do {                                                          \
2857       volatile OrigFn        _orig = (orig);                     \
2858       volatile unsigned long _argvec[3+4];                       \
2859       volatile unsigned long _res;                               \
2860       /* _argvec[0] holds current r2 across the call */          \
2861       _argvec[1]   = (unsigned long)_orig.r2;                    \
2862       _argvec[2]   = (unsigned long)_orig.nraddr;                \
2863       _argvec[2+1] = (unsigned long)arg1;                        \
2864       _argvec[2+2] = (unsigned long)arg2;                        \
2865       _argvec[2+3] = (unsigned long)arg3;                        \
2866       _argvec[2+4] = (unsigned long)arg4;                        \
2867       __asm__ volatile(                                          \
2868          VALGRIND_ALIGN_STACK                                    \
2869          "mr 11,%1\n\t"                                          \
2870          "std 2,-16(11)\n\t"  /* save tocptr */                  \
2871          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */          \
2872          "ld   3, 8(11)\n\t"  /* arg1->r3 */                     \
2873          "ld   4, 16(11)\n\t" /* arg2->r4 */                     \
2874          "ld   5, 24(11)\n\t" /* arg3->r5 */                     \
2875          "ld   6, 32(11)\n\t" /* arg4->r6 */                     \
2876          "ld  11, 0(11)\n\t"  /* target->r11 */                  \
2877          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
2878          "mr 11,%1\n\t"                                          \
2879          "mr %0,3\n\t"                                           \
2880          "ld 2,-16(11)\n\t"   /* restore tocptr */               \
2881          VALGRIND_RESTORE_STACK                                  \
2882          : /*out*/   "=r" (_res)                                 \
2883          : /*in*/    "r" (&_argvec[2])                           \
2884          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
2885       );                                                         \
2886       lval = (__typeof__(lval)) _res;                            \
2887    } while (0)
/* ppc64be: five register arguments (arg1..arg5 -> r3..r7). */
2889 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
2890    do {                                                          \
2891       volatile OrigFn        _orig = (orig);                     \
2892       volatile unsigned long _argvec[3+5];                       \
2893       volatile unsigned long _res;                               \
2894       /* _argvec[0] holds current r2 across the call */          \
2895       _argvec[1]   = (unsigned long)_orig.r2;                    \
2896       _argvec[2]   = (unsigned long)_orig.nraddr;                \
2897       _argvec[2+1] = (unsigned long)arg1;                        \
2898       _argvec[2+2] = (unsigned long)arg2;                        \
2899       _argvec[2+3] = (unsigned long)arg3;                        \
2900       _argvec[2+4] = (unsigned long)arg4;                        \
2901       _argvec[2+5] = (unsigned long)arg5;                        \
2902       __asm__ volatile(                                          \
2903          VALGRIND_ALIGN_STACK                                    \
2904          "mr 11,%1\n\t"                                          \
2905          "std 2,-16(11)\n\t"  /* save tocptr */                  \
2906          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */          \
2907          "ld   3, 8(11)\n\t"  /* arg1->r3 */                     \
2908          "ld   4, 16(11)\n\t" /* arg2->r4 */                     \
2909          "ld   5, 24(11)\n\t" /* arg3->r5 */                     \
2910          "ld   6, 32(11)\n\t" /* arg4->r6 */                     \
2911          "ld   7, 40(11)\n\t" /* arg5->r7 */                     \
2912          "ld  11, 0(11)\n\t"  /* target->r11 */                  \
2913          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
2914          "mr 11,%1\n\t"                                          \
2915          "mr %0,3\n\t"                                           \
2916          "ld 2,-16(11)\n\t"   /* restore tocptr */               \
2917          VALGRIND_RESTORE_STACK                                  \
2918          : /*out*/   "=r" (_res)                                 \
2919          : /*in*/    "r" (&_argvec[2])                           \
2920          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
2921       );                                                         \
2922       lval = (__typeof__(lval)) _res;                            \
2923    } while (0)
/* ppc64be: six register arguments (arg1..arg6 -> r3..r8). */
2925 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
2926    do {                                                          \
2927       volatile OrigFn        _orig = (orig);                     \
2928       volatile unsigned long _argvec[3+6];                       \
2929       volatile unsigned long _res;                               \
2930       /* _argvec[0] holds current r2 across the call */          \
2931       _argvec[1]   = (unsigned long)_orig.r2;                    \
2932       _argvec[2]   = (unsigned long)_orig.nraddr;                \
2933       _argvec[2+1] = (unsigned long)arg1;                        \
2934       _argvec[2+2] = (unsigned long)arg2;                        \
2935       _argvec[2+3] = (unsigned long)arg3;                        \
2936       _argvec[2+4] = (unsigned long)arg4;                        \
2937       _argvec[2+5] = (unsigned long)arg5;                        \
2938       _argvec[2+6] = (unsigned long)arg6;                        \
2939       __asm__ volatile(                                          \
2940          VALGRIND_ALIGN_STACK                                    \
2941          "mr 11,%1\n\t"                                          \
2942          "std 2,-16(11)\n\t"  /* save tocptr */                  \
2943          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */          \
2944          "ld   3, 8(11)\n\t"  /* arg1->r3 */                     \
2945          "ld   4, 16(11)\n\t" /* arg2->r4 */                     \
2946          "ld   5, 24(11)\n\t" /* arg3->r5 */                     \
2947          "ld   6, 32(11)\n\t" /* arg4->r6 */                     \
2948          "ld   7, 40(11)\n\t" /* arg5->r7 */                     \
2949          "ld   8, 48(11)\n\t" /* arg6->r8 */                     \
2950          "ld  11, 0(11)\n\t"  /* target->r11 */                  \
2951          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
2952          "mr 11,%1\n\t"                                          \
2953          "mr %0,3\n\t"                                           \
2954          "ld 2,-16(11)\n\t"   /* restore tocptr */               \
2955          VALGRIND_RESTORE_STACK                                  \
2956          : /*out*/   "=r" (_res)                                 \
2957          : /*in*/    "r" (&_argvec[2])                           \
2958          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
2959       );                                                         \
2960       lval = (__typeof__(lval)) _res;                            \
2961    } while (0)
/* ppc64be: seven register arguments (arg1..arg7 -> r3..r9). */
2963 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
2964                                  arg7)                            \
2965    do {                                                          \
2966       volatile OrigFn        _orig = (orig);                     \
2967       volatile unsigned long _argvec[3+7];                       \
2968       volatile unsigned long _res;                               \
2969       /* _argvec[0] holds current r2 across the call */          \
2970       _argvec[1]   = (unsigned long)_orig.r2;                    \
2971       _argvec[2]   = (unsigned long)_orig.nraddr;                \
2972       _argvec[2+1] = (unsigned long)arg1;                        \
2973       _argvec[2+2] = (unsigned long)arg2;                        \
2974       _argvec[2+3] = (unsigned long)arg3;                        \
2975       _argvec[2+4] = (unsigned long)arg4;                        \
2976       _argvec[2+5] = (unsigned long)arg5;                        \
2977       _argvec[2+6] = (unsigned long)arg6;                        \
2978       _argvec[2+7] = (unsigned long)arg7;                        \
2979       __asm__ volatile(                                          \
2980          VALGRIND_ALIGN_STACK                                    \
2981          "mr 11,%1\n\t"                                          \
2982          "std 2,-16(11)\n\t"  /* save tocptr */                  \
2983          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */          \
2984          "ld   3, 8(11)\n\t"  /* arg1->r3 */                     \
2985          "ld   4, 16(11)\n\t" /* arg2->r4 */                     \
2986          "ld   5, 24(11)\n\t" /* arg3->r5 */                     \
2987          "ld   6, 32(11)\n\t" /* arg4->r6 */                     \
2988          "ld   7, 40(11)\n\t" /* arg5->r7 */                     \
2989          "ld   8, 48(11)\n\t" /* arg6->r8 */                     \
2990          "ld   9, 56(11)\n\t" /* arg7->r9 */                     \
2991          "ld  11, 0(11)\n\t"  /* target->r11 */                  \
2992          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
2993          "mr 11,%1\n\t"                                          \
2994          "mr %0,3\n\t"                                           \
2995          "ld 2,-16(11)\n\t"   /* restore tocptr */               \
2996          VALGRIND_RESTORE_STACK                                  \
2997          : /*out*/   "=r" (_res)                                 \
2998          : /*in*/    "r" (&_argvec[2])                           \
2999          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3000       );                                                         \
3001       lval = (__typeof__(lval)) _res;                            \
3002    } while (0)
/* ppc64be: eight register arguments (arg1..arg8 -> r3..r10); this is
   the last all-in-registers variant, r10 being the final GPR arg. */
3004 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
3005                                  arg7,arg8)                       \
3006    do {                                                          \
3007       volatile OrigFn        _orig = (orig);                     \
3008       volatile unsigned long _argvec[3+8];                       \
3009       volatile unsigned long _res;                               \
3010       /* _argvec[0] holds current r2 across the call */          \
3011       _argvec[1]   = (unsigned long)_orig.r2;                    \
3012       _argvec[2]   = (unsigned long)_orig.nraddr;                \
3013       _argvec[2+1] = (unsigned long)arg1;                        \
3014       _argvec[2+2] = (unsigned long)arg2;                        \
3015       _argvec[2+3] = (unsigned long)arg3;                        \
3016       _argvec[2+4] = (unsigned long)arg4;                        \
3017       _argvec[2+5] = (unsigned long)arg5;                        \
3018       _argvec[2+6] = (unsigned long)arg6;                        \
3019       _argvec[2+7] = (unsigned long)arg7;                        \
3020       _argvec[2+8] = (unsigned long)arg8;                        \
3021       __asm__ volatile(                                          \
3022          VALGRIND_ALIGN_STACK                                    \
3023          "mr 11,%1\n\t"                                          \
3024          "std 2,-16(11)\n\t"  /* save tocptr */                  \
3025          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */          \
3026          "ld   3, 8(11)\n\t"  /* arg1->r3 */                     \
3027          "ld   4, 16(11)\n\t" /* arg2->r4 */                     \
3028          "ld   5, 24(11)\n\t" /* arg3->r5 */                     \
3029          "ld   6, 32(11)\n\t" /* arg4->r6 */                     \
3030          "ld   7, 40(11)\n\t" /* arg5->r7 */                     \
3031          "ld   8, 48(11)\n\t" /* arg6->r8 */                     \
3032          "ld   9, 56(11)\n\t" /* arg7->r9 */                     \
3033          "ld  10, 64(11)\n\t" /* arg8->r10 */                    \
3034          "ld  11, 0(11)\n\t"  /* target->r11 */                  \
3035          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
3036          "mr 11,%1\n\t"                                          \
3037          "mr %0,3\n\t"                                           \
3038          "ld 2,-16(11)\n\t"   /* restore tocptr */               \
3039          VALGRIND_RESTORE_STACK                                  \
3040          : /*out*/   "=r" (_res)                                 \
3041          : /*in*/    "r" (&_argvec[2])                           \
3042          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3043       );                                                         \
3044       lval = (__typeof__(lval)) _res;                            \
3045    } while (0)
/* ppc64be: nine arguments — arg1..arg8 in r3..r10, arg9 spilled to
   the stack at 112(r1) inside a temporarily expanded 128-byte frame
   (undone by VALGRIND_RESTORE_STACK). */
3047 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
3048                                  arg7,arg8,arg9)                  \
3049    do {                                                          \
3050       volatile OrigFn        _orig = (orig);                     \
3051       volatile unsigned long _argvec[3+9];                       \
3052       volatile unsigned long _res;                               \
3053       /* _argvec[0] holds current r2 across the call */          \
3054       _argvec[1]   = (unsigned long)_orig.r2;                    \
3055       _argvec[2]   = (unsigned long)_orig.nraddr;                \
3056       _argvec[2+1] = (unsigned long)arg1;                        \
3057       _argvec[2+2] = (unsigned long)arg2;                        \
3058       _argvec[2+3] = (unsigned long)arg3;                        \
3059       _argvec[2+4] = (unsigned long)arg4;                        \
3060       _argvec[2+5] = (unsigned long)arg5;                        \
3061       _argvec[2+6] = (unsigned long)arg6;                        \
3062       _argvec[2+7] = (unsigned long)arg7;                        \
3063       _argvec[2+8] = (unsigned long)arg8;                        \
3064       _argvec[2+9] = (unsigned long)arg9;                        \
3065       __asm__ volatile(                                          \
3066          VALGRIND_ALIGN_STACK                                    \
3067          "mr 11,%1\n\t"                                          \
3068          "std 2,-16(11)\n\t"  /* save tocptr */                  \
3069          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */          \
3070          "addi 1,1,-128\n\t"  /* expand stack frame */           \
3071          /* arg9 */                                              \
3072          "ld  3,72(11)\n\t"                                      \
3073          "std 3,112(1)\n\t"                                      \
3074          /* args1-8 */                                           \
3075          "ld   3, 8(11)\n\t"  /* arg1->r3 */                     \
3076          "ld   4, 16(11)\n\t" /* arg2->r4 */                     \
3077          "ld   5, 24(11)\n\t" /* arg3->r5 */                     \
3078          "ld   6, 32(11)\n\t" /* arg4->r6 */                     \
3079          "ld   7, 40(11)\n\t" /* arg5->r7 */                     \
3080          "ld   8, 48(11)\n\t" /* arg6->r8 */                     \
3081          "ld   9, 56(11)\n\t" /* arg7->r9 */                     \
3082          "ld  10, 64(11)\n\t" /* arg8->r10 */                    \
3083          "ld  11, 0(11)\n\t"  /* target->r11 */                  \
3084          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
3085          "mr 11,%1\n\t"                                          \
3086          "mr %0,3\n\t"                                           \
3087          "ld 2,-16(11)\n\t"   /* restore tocptr */               \
3088          VALGRIND_RESTORE_STACK                                  \
3089          : /*out*/   "=r" (_res)                                 \
3090          : /*in*/    "r" (&_argvec[2])                           \
3091          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3092       );                                                         \
3093       lval = (__typeof__(lval)) _res;                            \
3094    } while (0)
/* ppc64be: ten arguments — arg1..arg8 in r3..r10, arg9/arg10 spilled
   to 112(r1)/120(r1) inside a temporarily expanded 128-byte frame. */
3096 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
3097                                   arg7,arg8,arg9,arg10)           \
3098    do {                                                          \
3099       volatile OrigFn        _orig = (orig);                     \
3100       volatile unsigned long _argvec[3+10];                      \
3101       volatile unsigned long _res;                               \
3102       /* _argvec[0] holds current r2 across the call */          \
3103       _argvec[1]   = (unsigned long)_orig.r2;                    \
3104       _argvec[2]   = (unsigned long)_orig.nraddr;                \
3105       _argvec[2+1] = (unsigned long)arg1;                        \
3106       _argvec[2+2] = (unsigned long)arg2;                        \
3107       _argvec[2+3] = (unsigned long)arg3;                        \
3108       _argvec[2+4] = (unsigned long)arg4;                        \
3109       _argvec[2+5] = (unsigned long)arg5;                        \
3110       _argvec[2+6] = (unsigned long)arg6;                        \
3111       _argvec[2+7] = (unsigned long)arg7;                        \
3112       _argvec[2+8] = (unsigned long)arg8;                        \
3113       _argvec[2+9] = (unsigned long)arg9;                        \
3114       _argvec[2+10] = (unsigned long)arg10;                      \
3115       __asm__ volatile(                                          \
3116          VALGRIND_ALIGN_STACK                                    \
3117          "mr 11,%1\n\t"                                          \
3118          "std 2,-16(11)\n\t"  /* save tocptr */                  \
3119          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */          \
3120          "addi 1,1,-128\n\t"  /* expand stack frame */           \
3121          /* arg10 */                                             \
3122          "ld  3,80(11)\n\t"                                      \
3123          "std 3,120(1)\n\t"                                      \
3124          /* arg9 */                                              \
3125          "ld  3,72(11)\n\t"                                      \
3126          "std 3,112(1)\n\t"                                      \
3127          /* args1-8 */                                           \
3128          "ld   3, 8(11)\n\t"  /* arg1->r3 */                     \
3129          "ld   4, 16(11)\n\t" /* arg2->r4 */                     \
3130          "ld   5, 24(11)\n\t" /* arg3->r5 */                     \
3131          "ld   6, 32(11)\n\t" /* arg4->r6 */                     \
3132          "ld   7, 40(11)\n\t" /* arg5->r7 */                     \
3133          "ld   8, 48(11)\n\t" /* arg6->r8 */                     \
3134          "ld   9, 56(11)\n\t" /* arg7->r9 */                     \
3135          "ld  10, 64(11)\n\t" /* arg8->r10 */                    \
3136          "ld  11, 0(11)\n\t"  /* target->r11 */                  \
3137          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
3138          "mr 11,%1\n\t"                                          \
3139          "mr %0,3\n\t"                                           \
3140          "ld 2,-16(11)\n\t"   /* restore tocptr */               \
3141          VALGRIND_RESTORE_STACK                                  \
3142          : /*out*/   "=r" (_res)                                 \
3143          : /*in*/    "r" (&_argvec[2])                           \
3144          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3145       );                                                         \
3146       lval = (__typeof__(lval)) _res;                            \
3147    } while (0)
/* ppc64be: eleven arguments — arg1..arg8 in r3..r10, arg9..arg11
   spilled to 112/120/128(r1) inside an expanded 144-byte frame. */
3149 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
3150                                   arg7,arg8,arg9,arg10,arg11)     \
3151    do {                                                          \
3152       volatile OrigFn        _orig = (orig);                     \
3153       volatile unsigned long _argvec[3+11];                      \
3154       volatile unsigned long _res;                               \
3155       /* _argvec[0] holds current r2 across the call */          \
3156       _argvec[1]   = (unsigned long)_orig.r2;                    \
3157       _argvec[2]   = (unsigned long)_orig.nraddr;                \
3158       _argvec[2+1] = (unsigned long)arg1;                        \
3159       _argvec[2+2] = (unsigned long)arg2;                        \
3160       _argvec[2+3] = (unsigned long)arg3;                        \
3161       _argvec[2+4] = (unsigned long)arg4;                        \
3162       _argvec[2+5] = (unsigned long)arg5;                        \
3163       _argvec[2+6] = (unsigned long)arg6;                        \
3164       _argvec[2+7] = (unsigned long)arg7;                        \
3165       _argvec[2+8] = (unsigned long)arg8;                        \
3166       _argvec[2+9] = (unsigned long)arg9;                        \
3167       _argvec[2+10] = (unsigned long)arg10;                      \
3168       _argvec[2+11] = (unsigned long)arg11;                      \
3169       __asm__ volatile(                                          \
3170          VALGRIND_ALIGN_STACK                                    \
3171          "mr 11,%1\n\t"                                          \
3172          "std 2,-16(11)\n\t"  /* save tocptr */                  \
3173          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */          \
3174          "addi 1,1,-144\n\t"  /* expand stack frame */           \
3175          /* arg11 */                                             \
3176          "ld  3,88(11)\n\t"                                      \
3177          "std 3,128(1)\n\t"                                      \
3178          /* arg10 */                                             \
3179          "ld  3,80(11)\n\t"                                      \
3180          "std 3,120(1)\n\t"                                      \
3181          /* arg9 */                                              \
3182          "ld  3,72(11)\n\t"                                      \
3183          "std 3,112(1)\n\t"                                      \
3184          /* args1-8 */                                           \
3185          "ld   3, 8(11)\n\t"  /* arg1->r3 */                     \
3186          "ld   4, 16(11)\n\t" /* arg2->r4 */                     \
3187          "ld   5, 24(11)\n\t" /* arg3->r5 */                     \
3188          "ld   6, 32(11)\n\t" /* arg4->r6 */                     \
3189          "ld   7, 40(11)\n\t" /* arg5->r7 */                     \
3190          "ld   8, 48(11)\n\t" /* arg6->r8 */                     \
3191          "ld   9, 56(11)\n\t" /* arg7->r9 */                     \
3192          "ld  10, 64(11)\n\t" /* arg8->r10 */                    \
3193          "ld  11, 0(11)\n\t"  /* target->r11 */                  \
3194          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
3195          "mr 11,%1\n\t"                                          \
3196          "mr %0,3\n\t"                                           \
3197          "ld 2,-16(11)\n\t"   /* restore tocptr */               \
3198          VALGRIND_RESTORE_STACK                                  \
3199          : /*out*/   "=r" (_res)                                 \
3200          : /*in*/    "r" (&_argvec[2])                           \
3201          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3202       );                                                         \
3203       lval = (__typeof__(lval)) _res;                            \
3204    } while (0)
/* ppc64be: twelve arguments — arg1..arg8 in r3..r10, arg9..arg12
   spilled to 112..136(r1) inside an expanded 144-byte frame. */
3206 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
3207                                   arg7,arg8,arg9,arg10,arg11,arg12) \
3208    do {                                                          \
3209       volatile OrigFn        _orig = (orig);                     \
3210       volatile unsigned long _argvec[3+12];                      \
3211       volatile unsigned long _res;                               \
3212       /* _argvec[0] holds current r2 across the call */          \
3213       _argvec[1]   = (unsigned long)_orig.r2;                    \
3214       _argvec[2]   = (unsigned long)_orig.nraddr;                \
3215       _argvec[2+1] = (unsigned long)arg1;                        \
3216       _argvec[2+2] = (unsigned long)arg2;                        \
3217       _argvec[2+3] = (unsigned long)arg3;                        \
3218       _argvec[2+4] = (unsigned long)arg4;                        \
3219       _argvec[2+5] = (unsigned long)arg5;                        \
3220       _argvec[2+6] = (unsigned long)arg6;                        \
3221       _argvec[2+7] = (unsigned long)arg7;                        \
3222       _argvec[2+8] = (unsigned long)arg8;                        \
3223       _argvec[2+9] = (unsigned long)arg9;                        \
3224       _argvec[2+10] = (unsigned long)arg10;                      \
3225       _argvec[2+11] = (unsigned long)arg11;                      \
3226       _argvec[2+12] = (unsigned long)arg12;                      \
3227       __asm__ volatile(                                          \
3228          VALGRIND_ALIGN_STACK                                    \
3229          "mr 11,%1\n\t"                                          \
3230          "std 2,-16(11)\n\t"  /* save tocptr */                  \
3231          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */          \
3232          "addi 1,1,-144\n\t"  /* expand stack frame */           \
3233          /* arg12 */                                             \
3234          "ld  3,96(11)\n\t"                                      \
3235          "std 3,136(1)\n\t"                                      \
3236          /* arg11 */                                             \
3237          "ld  3,88(11)\n\t"                                      \
3238          "std 3,128(1)\n\t"                                      \
3239          /* arg10 */                                             \
3240          "ld  3,80(11)\n\t"                                      \
3241          "std 3,120(1)\n\t"                                      \
3242          /* arg9 */                                              \
3243          "ld  3,72(11)\n\t"                                      \
3244          "std 3,112(1)\n\t"                                      \
3245          /* args1-8 */                                           \
3246          "ld   3, 8(11)\n\t"  /* arg1->r3 */                     \
3247          "ld   4, 16(11)\n\t" /* arg2->r4 */                     \
3248          "ld   5, 24(11)\n\t" /* arg3->r5 */                     \
3249          "ld   6, 32(11)\n\t" /* arg4->r6 */                     \
3250          "ld   7, 40(11)\n\t" /* arg5->r7 */                     \
3251          "ld   8, 48(11)\n\t" /* arg6->r8 */                     \
3252          "ld   9, 56(11)\n\t" /* arg7->r9 */                     \
3253          "ld  10, 64(11)\n\t" /* arg8->r10 */                    \
3254          "ld  11, 0(11)\n\t"  /* target->r11 */                  \
3255          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
3256          "mr 11,%1\n\t"                                          \
3257          "mr %0,3\n\t"                                           \
3258          "ld 2,-16(11)\n\t"   /* restore tocptr */               \
3259          VALGRIND_RESTORE_STACK                                  \
3260          : /*out*/   "=r" (_res)                                 \
3261          : /*in*/    "r" (&_argvec[2])                           \
3262          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3263       );                                                         \
3264       lval = (__typeof__(lval)) _res;                            \
3265    } while (0)
3267 #endif /* PLAT_ppc64be_linux */
3269 /* ------------------------- ppc64le-linux ----------------------- */
3270 #if defined(PLAT_ppc64le_linux)
3272 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3274 /* These regs are trashed by the hidden call. */
/* Clobber list handed to gcc on every hidden call: the volatile GPRs
   r0, r3-r13, the link/count registers and all CR fields, so anything
   live across the asm is spilled by the compiler. */
3275 #define __CALLER_SAVED_REGS                                       \
3276    "lr", "ctr", "xer",                                           \
3277    "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",       \
3278    "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",        \
3279    "r11", "r12", "r13"
3281 /* Macros to save and align the stack before making a function
3282 call and restore it afterwards as gcc may not keep the stack
3283 pointer aligned if it doesn't realise calls are being made
3284 to other functions. */
/* Save the stack pointer in r28, then clear the low 4 bits of r1
   (rldicr 1,1,0,59 keeps bits 0..59) to force 16-byte alignment.
   Users must therefore list r28 in their clobber set. */
3286 #define VALGRIND_ALIGN_STACK \
3287       "mr 28,1\n\t"          \
3288       "rldicr 1,1,0,59\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the original sp from r28. */
3289 #define VALGRIND_RESTORE_STACK \
3290       "mr 1,28\n\t"
3292 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
3293 long) == 8. */
/* ppc64le: call the non-redirected function at _orig.nraddr with no
   arguments.  Unlike the be variant this uses r12 as the scratch /
   target register; r2 is saved at -16(r12) (_argvec[0]) and nraddr's
   TOC from _argvec[1] is installed for the call.  Result from r3. */
3295 #define CALL_FN_W_v(lval, orig)                                   \
3296    do {                                                          \
3297       volatile OrigFn        _orig = (orig);                     \
3298       volatile unsigned long _argvec[3+0];                       \
3299       volatile unsigned long _res;                               \
3300       /* _argvec[0] holds current r2 across the call */          \
3301       _argvec[1] = (unsigned long)_orig.r2;                      \
3302       _argvec[2] = (unsigned long)_orig.nraddr;                  \
3303       __asm__ volatile(                                          \
3304          VALGRIND_ALIGN_STACK                                    \
3305          "mr 12,%1\n\t"                                          \
3306          "std 2,-16(12)\n\t"  /* save tocptr */                  \
3307          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */          \
3308          "ld  12, 0(12)\n\t"  /* target->r12 */                  \
3309          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
3310          "mr 12,%1\n\t"                                          \
3311          "mr %0,3\n\t"                                           \
3312          "ld 2,-16(12)\n\t" /* restore tocptr */                 \
3313          VALGRIND_RESTORE_STACK                                  \
3314          : /*out*/   "=r" (_res)                                 \
3315          : /*in*/    "r" (&_argvec[2])                           \
3316          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3317       );                                                         \
3318       lval = (__typeof__(lval)) _res;                            \
3319    } while (0)
/* ppc64le: one register argument (arg1->r3); r12 is the call-target
   register, TOC handling as in CALL_FN_W_v. */
3321 #define CALL_FN_W_W(lval, orig, arg1)                             \
3322    do {                                                          \
3323       volatile OrigFn        _orig = (orig);                     \
3324       volatile unsigned long _argvec[3+1];                       \
3325       volatile unsigned long _res;                               \
3326       /* _argvec[0] holds current r2 across the call */          \
3327       _argvec[1]   = (unsigned long)_orig.r2;                    \
3328       _argvec[2]   = (unsigned long)_orig.nraddr;                \
3329       _argvec[2+1] = (unsigned long)arg1;                        \
3330       __asm__ volatile(                                          \
3331          VALGRIND_ALIGN_STACK                                    \
3332          "mr 12,%1\n\t"                                          \
3333          "std 2,-16(12)\n\t"  /* save tocptr */                  \
3334          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */          \
3335          "ld   3, 8(12)\n\t"  /* arg1->r3 */                     \
3336          "ld  12, 0(12)\n\t"  /* target->r12 */                  \
3337          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
3338          "mr 12,%1\n\t"                                          \
3339          "mr %0,3\n\t"                                           \
3340          "ld 2,-16(12)\n\t"   /* restore tocptr */               \
3341          VALGRIND_RESTORE_STACK                                  \
3342          : /*out*/   "=r" (_res)                                 \
3343          : /*in*/    "r" (&_argvec[2])                           \
3344          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3345       );                                                         \
3346       lval = (__typeof__(lval)) _res;                            \
3347    } while (0)
/* ppc64le: two register arguments (arg1->r3, arg2->r4). */
3349 #define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
3350    do {                                                          \
3351       volatile OrigFn        _orig = (orig);                     \
3352       volatile unsigned long _argvec[3+2];                       \
3353       volatile unsigned long _res;                               \
3354       /* _argvec[0] holds current r2 across the call */          \
3355       _argvec[1]   = (unsigned long)_orig.r2;                    \
3356       _argvec[2]   = (unsigned long)_orig.nraddr;                \
3357       _argvec[2+1] = (unsigned long)arg1;                        \
3358       _argvec[2+2] = (unsigned long)arg2;                        \
3359       __asm__ volatile(                                          \
3360          VALGRIND_ALIGN_STACK                                    \
3361          "mr 12,%1\n\t"                                          \
3362          "std 2,-16(12)\n\t"  /* save tocptr */                  \
3363          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */          \
3364          "ld   3, 8(12)\n\t"  /* arg1->r3 */                     \
3365          "ld   4, 16(12)\n\t" /* arg2->r4 */                     \
3366          "ld  12, 0(12)\n\t"  /* target->r12 */                  \
3367          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
3368          "mr 12,%1\n\t"                                          \
3369          "mr %0,3\n\t"                                           \
3370          "ld 2,-16(12)\n\t"   /* restore tocptr */               \
3371          VALGRIND_RESTORE_STACK                                  \
3372          : /*out*/   "=r" (_res)                                 \
3373          : /*in*/    "r" (&_argvec[2])                           \
3374          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3375       );                                                         \
3376       lval = (__typeof__(lval)) _res;                            \
3377    } while (0)
/* ppc64le: three register arguments (arg1..arg3 -> r3..r5). */
3379 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
3380    do {                                                          \
3381       volatile OrigFn        _orig = (orig);                     \
3382       volatile unsigned long _argvec[3+3];                       \
3383       volatile unsigned long _res;                               \
3384       /* _argvec[0] holds current r2 across the call */          \
3385       _argvec[1]   = (unsigned long)_orig.r2;                    \
3386       _argvec[2]   = (unsigned long)_orig.nraddr;                \
3387       _argvec[2+1] = (unsigned long)arg1;                        \
3388       _argvec[2+2] = (unsigned long)arg2;                        \
3389       _argvec[2+3] = (unsigned long)arg3;                        \
3390       __asm__ volatile(                                          \
3391          VALGRIND_ALIGN_STACK                                    \
3392          "mr 12,%1\n\t"                                          \
3393          "std 2,-16(12)\n\t"  /* save tocptr */                  \
3394          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */          \
3395          "ld   3, 8(12)\n\t"  /* arg1->r3 */                     \
3396          "ld   4, 16(12)\n\t" /* arg2->r4 */                     \
3397          "ld   5, 24(12)\n\t" /* arg3->r5 */                     \
3398          "ld  12, 0(12)\n\t"  /* target->r12 */                  \
3399          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
3400          "mr 12,%1\n\t"                                          \
3401          "mr %0,3\n\t"                                           \
3402          "ld 2,-16(12)\n\t"   /* restore tocptr */               \
3403          VALGRIND_RESTORE_STACK                                  \
3404          : /*out*/   "=r" (_res)                                 \
3405          : /*in*/    "r" (&_argvec[2])                           \
3406          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3407       );                                                         \
3408       lval = (__typeof__(lval)) _res;                            \
3409    } while (0)
/* ppc64le: four register arguments (arg1..arg4 -> r3..r6). */
3411 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
3412    do {                                                          \
3413       volatile OrigFn        _orig = (orig);                     \
3414       volatile unsigned long _argvec[3+4];                       \
3415       volatile unsigned long _res;                               \
3416       /* _argvec[0] holds current r2 across the call */          \
3417       _argvec[1]   = (unsigned long)_orig.r2;                    \
3418       _argvec[2]   = (unsigned long)_orig.nraddr;                \
3419       _argvec[2+1] = (unsigned long)arg1;                        \
3420       _argvec[2+2] = (unsigned long)arg2;                        \
3421       _argvec[2+3] = (unsigned long)arg3;                        \
3422       _argvec[2+4] = (unsigned long)arg4;                        \
3423       __asm__ volatile(                                          \
3424          VALGRIND_ALIGN_STACK                                    \
3425          "mr 12,%1\n\t"                                          \
3426          "std 2,-16(12)\n\t"  /* save tocptr */                  \
3427          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */          \
3428          "ld   3, 8(12)\n\t"  /* arg1->r3 */                     \
3429          "ld   4, 16(12)\n\t" /* arg2->r4 */                     \
3430          "ld   5, 24(12)\n\t" /* arg3->r5 */                     \
3431          "ld   6, 32(12)\n\t" /* arg4->r6 */                     \
3432          "ld  12, 0(12)\n\t"  /* target->r12 */                  \
3433          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
3434          "mr 12,%1\n\t"                                          \
3435          "mr %0,3\n\t"                                           \
3436          "ld 2,-16(12)\n\t"   /* restore tocptr */               \
3437          VALGRIND_RESTORE_STACK                                  \
3438          : /*out*/   "=r" (_res)                                 \
3439          : /*in*/    "r" (&_argvec[2])                           \
3440          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3441       );                                                         \
3442       lval = (__typeof__(lval)) _res;                            \
3443    } while (0)
/* ppc64le: five register arguments (arg1..arg5 -> r3..r7). */
3445 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
3446    do {                                                          \
3447       volatile OrigFn        _orig = (orig);                     \
3448       volatile unsigned long _argvec[3+5];                       \
3449       volatile unsigned long _res;                               \
3450       /* _argvec[0] holds current r2 across the call */          \
3451       _argvec[1]   = (unsigned long)_orig.r2;                    \
3452       _argvec[2]   = (unsigned long)_orig.nraddr;                \
3453       _argvec[2+1] = (unsigned long)arg1;                        \
3454       _argvec[2+2] = (unsigned long)arg2;                        \
3455       _argvec[2+3] = (unsigned long)arg3;                        \
3456       _argvec[2+4] = (unsigned long)arg4;                        \
3457       _argvec[2+5] = (unsigned long)arg5;                        \
3458       __asm__ volatile(                                          \
3459          VALGRIND_ALIGN_STACK                                    \
3460          "mr 12,%1\n\t"                                          \
3461          "std 2,-16(12)\n\t"  /* save tocptr */                  \
3462          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */          \
3463          "ld   3, 8(12)\n\t"  /* arg1->r3 */                     \
3464          "ld   4, 16(12)\n\t" /* arg2->r4 */                     \
3465          "ld   5, 24(12)\n\t" /* arg3->r5 */                     \
3466          "ld   6, 32(12)\n\t" /* arg4->r6 */                     \
3467          "ld   7, 40(12)\n\t" /* arg5->r7 */                     \
3468          "ld  12, 0(12)\n\t"  /* target->r12 */                  \
3469          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
3470          "mr 12,%1\n\t"                                          \
3471          "mr %0,3\n\t"                                           \
3472          "ld 2,-16(12)\n\t"   /* restore tocptr */               \
3473          VALGRIND_RESTORE_STACK                                  \
3474          : /*out*/   "=r" (_res)                                 \
3475          : /*in*/    "r" (&_argvec[2])                           \
3476          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3477       );                                                         \
3478       lval = (__typeof__(lval)) _res;                            \
3479    } while (0)
/* ppc64le: six register arguments (arg1..arg6 -> r3..r8). */
3481 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
3482    do {                                                          \
3483       volatile OrigFn        _orig = (orig);                     \
3484       volatile unsigned long _argvec[3+6];                       \
3485       volatile unsigned long _res;                               \
3486       /* _argvec[0] holds current r2 across the call */          \
3487       _argvec[1]   = (unsigned long)_orig.r2;                    \
3488       _argvec[2]   = (unsigned long)_orig.nraddr;                \
3489       _argvec[2+1] = (unsigned long)arg1;                        \
3490       _argvec[2+2] = (unsigned long)arg2;                        \
3491       _argvec[2+3] = (unsigned long)arg3;                        \
3492       _argvec[2+4] = (unsigned long)arg4;                        \
3493       _argvec[2+5] = (unsigned long)arg5;                        \
3494       _argvec[2+6] = (unsigned long)arg6;                        \
3495       __asm__ volatile(                                          \
3496          VALGRIND_ALIGN_STACK                                    \
3497          "mr 12,%1\n\t"                                          \
3498          "std 2,-16(12)\n\t"  /* save tocptr */                  \
3499          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */          \
3500          "ld   3, 8(12)\n\t"  /* arg1->r3 */                     \
3501          "ld   4, 16(12)\n\t" /* arg2->r4 */                     \
3502          "ld   5, 24(12)\n\t" /* arg3->r5 */                     \
3503          "ld   6, 32(12)\n\t" /* arg4->r6 */                     \
3504          "ld   7, 40(12)\n\t" /* arg5->r7 */                     \
3505          "ld   8, 48(12)\n\t" /* arg6->r8 */                     \
3506          "ld  12, 0(12)\n\t"  /* target->r12 */                  \
3507          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
3508          "mr 12,%1\n\t"                                          \
3509          "mr %0,3\n\t"                                           \
3510          "ld 2,-16(12)\n\t"   /* restore tocptr */               \
3511          VALGRIND_RESTORE_STACK                                  \
3512          : /*out*/   "=r" (_res)                                 \
3513          : /*in*/    "r" (&_argvec[2])                           \
3514          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3515       );                                                         \
3516       lval = (__typeof__(lval)) _res;                            \
3517    } while (0)
/* ppc64le: seven register arguments (arg1..arg7 -> r3..r9). */
3519 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
3520                                  arg7)                            \
3521    do {                                                          \
3522       volatile OrigFn        _orig = (orig);                     \
3523       volatile unsigned long _argvec[3+7];                       \
3524       volatile unsigned long _res;                               \
3525       /* _argvec[0] holds current r2 across the call */          \
3526       _argvec[1]   = (unsigned long)_orig.r2;                    \
3527       _argvec[2]   = (unsigned long)_orig.nraddr;                \
3528       _argvec[2+1] = (unsigned long)arg1;                        \
3529       _argvec[2+2] = (unsigned long)arg2;                        \
3530       _argvec[2+3] = (unsigned long)arg3;                        \
3531       _argvec[2+4] = (unsigned long)arg4;                        \
3532       _argvec[2+5] = (unsigned long)arg5;                        \
3533       _argvec[2+6] = (unsigned long)arg6;                        \
3534       _argvec[2+7] = (unsigned long)arg7;                        \
3535       __asm__ volatile(                                          \
3536          VALGRIND_ALIGN_STACK                                    \
3537          "mr 12,%1\n\t"                                          \
3538          "std 2,-16(12)\n\t"  /* save tocptr */                  \
3539          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */          \
3540          "ld   3, 8(12)\n\t"  /* arg1->r3 */                     \
3541          "ld   4, 16(12)\n\t" /* arg2->r4 */                     \
3542          "ld   5, 24(12)\n\t" /* arg3->r5 */                     \
3543          "ld   6, 32(12)\n\t" /* arg4->r6 */                     \
3544          "ld   7, 40(12)\n\t" /* arg5->r7 */                     \
3545          "ld   8, 48(12)\n\t" /* arg6->r8 */                     \
3546          "ld   9, 56(12)\n\t" /* arg7->r9 */                     \
3547          "ld  12, 0(12)\n\t"  /* target->r12 */                  \
3548          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
3549          "mr 12,%1\n\t"                                          \
3550          "mr %0,3\n\t"                                           \
3551          "ld 2,-16(12)\n\t"   /* restore tocptr */               \
3552          VALGRIND_RESTORE_STACK                                  \
3553          : /*out*/   "=r" (_res)                                 \
3554          : /*in*/    "r" (&_argvec[2])                           \
3555          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3556       );                                                         \
3557       lval = (__typeof__(lval)) _res;                            \
3558    } while (0)
/* ppc64le: eight register arguments (arg1..arg8 -> r3..r10); the last
   all-in-registers variant. */
3560 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
3561                                  arg7,arg8)                       \
3562    do {                                                          \
3563       volatile OrigFn        _orig = (orig);                     \
3564       volatile unsigned long _argvec[3+8];                       \
3565       volatile unsigned long _res;                               \
3566       /* _argvec[0] holds current r2 across the call */          \
3567       _argvec[1]   = (unsigned long)_orig.r2;                    \
3568       _argvec[2]   = (unsigned long)_orig.nraddr;                \
3569       _argvec[2+1] = (unsigned long)arg1;                        \
3570       _argvec[2+2] = (unsigned long)arg2;                        \
3571       _argvec[2+3] = (unsigned long)arg3;                        \
3572       _argvec[2+4] = (unsigned long)arg4;                        \
3573       _argvec[2+5] = (unsigned long)arg5;                        \
3574       _argvec[2+6] = (unsigned long)arg6;                        \
3575       _argvec[2+7] = (unsigned long)arg7;                        \
3576       _argvec[2+8] = (unsigned long)arg8;                        \
3577       __asm__ volatile(                                          \
3578          VALGRIND_ALIGN_STACK                                    \
3579          "mr 12,%1\n\t"                                          \
3580          "std 2,-16(12)\n\t"  /* save tocptr */                  \
3581          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */          \
3582          "ld   3, 8(12)\n\t"  /* arg1->r3 */                     \
3583          "ld   4, 16(12)\n\t" /* arg2->r4 */                     \
3584          "ld   5, 24(12)\n\t" /* arg3->r5 */                     \
3585          "ld   6, 32(12)\n\t" /* arg4->r6 */                     \
3586          "ld   7, 40(12)\n\t" /* arg5->r7 */                     \
3587          "ld   8, 48(12)\n\t" /* arg6->r8 */                     \
3588          "ld   9, 56(12)\n\t" /* arg7->r9 */                     \
3589          "ld  10, 64(12)\n\t" /* arg8->r10 */                    \
3590          "ld  12, 0(12)\n\t"  /* target->r12 */                  \
3591          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
3592          "mr 12,%1\n\t"                                          \
3593          "mr %0,3\n\t"                                           \
3594          "ld 2,-16(12)\n\t"   /* restore tocptr */               \
3595          VALGRIND_RESTORE_STACK                                  \
3596          : /*out*/   "=r" (_res)                                 \
3597          : /*in*/    "r" (&_argvec[2])                           \
3598          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3599       );                                                         \
3600       lval = (__typeof__(lval)) _res;                            \
3601    } while (0)
/* ppc64le: nine arguments — arg1..arg8 in r3..r10, arg9 spilled to
   96(r1) in an expanded 128-byte frame (note the smaller stack offset
   than the be variant's 112(r1)). */
3603 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
3604                                  arg7,arg8,arg9)                  \
3605    do {                                                          \
3606       volatile OrigFn        _orig = (orig);                     \
3607       volatile unsigned long _argvec[3+9];                       \
3608       volatile unsigned long _res;                               \
3609       /* _argvec[0] holds current r2 across the call */          \
3610       _argvec[1]   = (unsigned long)_orig.r2;                    \
3611       _argvec[2]   = (unsigned long)_orig.nraddr;                \
3612       _argvec[2+1] = (unsigned long)arg1;                        \
3613       _argvec[2+2] = (unsigned long)arg2;                        \
3614       _argvec[2+3] = (unsigned long)arg3;                        \
3615       _argvec[2+4] = (unsigned long)arg4;                        \
3616       _argvec[2+5] = (unsigned long)arg5;                        \
3617       _argvec[2+6] = (unsigned long)arg6;                        \
3618       _argvec[2+7] = (unsigned long)arg7;                        \
3619       _argvec[2+8] = (unsigned long)arg8;                        \
3620       _argvec[2+9] = (unsigned long)arg9;                        \
3621       __asm__ volatile(                                          \
3622          VALGRIND_ALIGN_STACK                                    \
3623          "mr 12,%1\n\t"                                          \
3624          "std 2,-16(12)\n\t"  /* save tocptr */                  \
3625          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */          \
3626          "addi 1,1,-128\n\t"  /* expand stack frame */           \
3627          /* arg9 */                                              \
3628          "ld  3,72(12)\n\t"                                      \
3629          "std 3,96(1)\n\t"                                       \
3630          /* args1-8 */                                           \
3631          "ld   3, 8(12)\n\t"  /* arg1->r3 */                     \
3632          "ld   4, 16(12)\n\t" /* arg2->r4 */                     \
3633          "ld   5, 24(12)\n\t" /* arg3->r5 */                     \
3634          "ld   6, 32(12)\n\t" /* arg4->r6 */                     \
3635          "ld   7, 40(12)\n\t" /* arg5->r7 */                     \
3636          "ld   8, 48(12)\n\t" /* arg6->r8 */                     \
3637          "ld   9, 56(12)\n\t" /* arg7->r9 */                     \
3638          "ld  10, 64(12)\n\t" /* arg8->r10 */                    \
3639          "ld  12, 0(12)\n\t"  /* target->r12 */                  \
3640          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
3641          "mr 12,%1\n\t"                                          \
3642          "mr %0,3\n\t"                                           \
3643          "ld 2,-16(12)\n\t"   /* restore tocptr */               \
3644          VALGRIND_RESTORE_STACK                                  \
3645          : /*out*/   "=r" (_res)                                 \
3646          : /*in*/    "r" (&_argvec[2])                           \
3647          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3648       );                                                         \
3649       lval = (__typeof__(lval)) _res;                            \
3650    } while (0)
/* ppc64le: ten arguments — arg1..arg8 in r3..r10, arg9/arg10 spilled
   to 96(r1)/104(r1) in an expanded 128-byte frame. */
3652 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
3653                                   arg7,arg8,arg9,arg10)           \
3654    do {                                                          \
3655       volatile OrigFn        _orig = (orig);                     \
3656       volatile unsigned long _argvec[3+10];                      \
3657       volatile unsigned long _res;                               \
3658       /* _argvec[0] holds current r2 across the call */          \
3659       _argvec[1]   = (unsigned long)_orig.r2;                    \
3660       _argvec[2]   = (unsigned long)_orig.nraddr;                \
3661       _argvec[2+1] = (unsigned long)arg1;                        \
3662       _argvec[2+2] = (unsigned long)arg2;                        \
3663       _argvec[2+3] = (unsigned long)arg3;                        \
3664       _argvec[2+4] = (unsigned long)arg4;                        \
3665       _argvec[2+5] = (unsigned long)arg5;                        \
3666       _argvec[2+6] = (unsigned long)arg6;                        \
3667       _argvec[2+7] = (unsigned long)arg7;                        \
3668       _argvec[2+8] = (unsigned long)arg8;                        \
3669       _argvec[2+9] = (unsigned long)arg9;                        \
3670       _argvec[2+10] = (unsigned long)arg10;                      \
3671       __asm__ volatile(                                          \
3672          VALGRIND_ALIGN_STACK                                    \
3673          "mr 12,%1\n\t"                                          \
3674          "std 2,-16(12)\n\t"  /* save tocptr */                  \
3675          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */          \
3676          "addi 1,1,-128\n\t"  /* expand stack frame */           \
3677          /* arg10 */                                             \
3678          "ld  3,80(12)\n\t"                                      \
3679          "std 3,104(1)\n\t"                                      \
3680          /* arg9 */                                              \
3681          "ld  3,72(12)\n\t"                                      \
3682          "std 3,96(1)\n\t"                                       \
3683          /* args1-8 */                                           \
3684          "ld   3, 8(12)\n\t"  /* arg1->r3 */                     \
3685          "ld   4, 16(12)\n\t" /* arg2->r4 */                     \
3686          "ld   5, 24(12)\n\t" /* arg3->r5 */                     \
3687          "ld   6, 32(12)\n\t" /* arg4->r6 */                     \
3688          "ld   7, 40(12)\n\t" /* arg5->r7 */                     \
3689          "ld   8, 48(12)\n\t" /* arg6->r8 */                     \
3690          "ld   9, 56(12)\n\t" /* arg7->r9 */                     \
3691          "ld  10, 64(12)\n\t" /* arg8->r10 */                    \
3692          "ld  12, 0(12)\n\t"  /* target->r12 */                  \
3693          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
3694          "mr 12,%1\n\t"                                          \
3695          "mr %0,3\n\t"                                           \
3696          "ld 2,-16(12)\n\t"   /* restore tocptr */               \
3697          VALGRIND_RESTORE_STACK                                  \
3698          : /*out*/   "=r" (_res)                                 \
3699          : /*in*/    "r" (&_argvec[2])                           \
3700          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
3701       );                                                         \
3702       lval = (__typeof__(lval)) _res;                            \
3703    } while (0)
3705 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3706 arg7,arg8,arg9,arg10,arg11) \
3707 do { \
3708 volatile OrigFn _orig = (orig); \
3709 volatile unsigned long _argvec[3+11]; \
3710 volatile unsigned long _res; \
3711 /* _argvec[0] holds current r2 across the call */ \
3712 _argvec[1] = (unsigned long)_orig.r2; \
3713 _argvec[2] = (unsigned long)_orig.nraddr; \
3714 _argvec[2+1] = (unsigned long)arg1; \
3715 _argvec[2+2] = (unsigned long)arg2; \
3716 _argvec[2+3] = (unsigned long)arg3; \
3717 _argvec[2+4] = (unsigned long)arg4; \
3718 _argvec[2+5] = (unsigned long)arg5; \
3719 _argvec[2+6] = (unsigned long)arg6; \
3720 _argvec[2+7] = (unsigned long)arg7; \
3721 _argvec[2+8] = (unsigned long)arg8; \
3722 _argvec[2+9] = (unsigned long)arg9; \
3723 _argvec[2+10] = (unsigned long)arg10; \
3724 _argvec[2+11] = (unsigned long)arg11; \
3725 __asm__ volatile( \
3726 VALGRIND_ALIGN_STACK \
3727 "mr 12,%1\n\t" \
3728 "std 2,-16(12)\n\t" /* save tocptr */ \
3729 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3730 "addi 1,1,-144\n\t" /* expand stack frame */ \
3731 /* arg11 */ \
3732 "ld 3,88(12)\n\t" \
3733 "std 3,112(1)\n\t" \
3734 /* arg10 */ \
3735 "ld 3,80(12)\n\t" \
3736 "std 3,104(1)\n\t" \
3737 /* arg9 */ \
3738 "ld 3,72(12)\n\t" \
3739 "std 3,96(1)\n\t" \
3740 /* args1-8 */ \
3741 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3742 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3743 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3744 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3745 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3746 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3747 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3748 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3749 "ld 12, 0(12)\n\t" /* target->r12 */ \
3750 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3751 "mr 12,%1\n\t" \
3752 "mr %0,3\n\t" \
3753 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3754 VALGRIND_RESTORE_STACK \
3755 : /*out*/ "=r" (_res) \
3756 : /*in*/ "r" (&_argvec[2]) \
3757 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3758 ); \
3759 lval = (__typeof__(lval)) _res; \
3760 } while (0)
3762 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3763 arg7,arg8,arg9,arg10,arg11,arg12) \
3764 do { \
3765 volatile OrigFn _orig = (orig); \
3766 volatile unsigned long _argvec[3+12]; \
3767 volatile unsigned long _res; \
3768 /* _argvec[0] holds current r2 across the call */ \
3769 _argvec[1] = (unsigned long)_orig.r2; \
3770 _argvec[2] = (unsigned long)_orig.nraddr; \
3771 _argvec[2+1] = (unsigned long)arg1; \
3772 _argvec[2+2] = (unsigned long)arg2; \
3773 _argvec[2+3] = (unsigned long)arg3; \
3774 _argvec[2+4] = (unsigned long)arg4; \
3775 _argvec[2+5] = (unsigned long)arg5; \
3776 _argvec[2+6] = (unsigned long)arg6; \
3777 _argvec[2+7] = (unsigned long)arg7; \
3778 _argvec[2+8] = (unsigned long)arg8; \
3779 _argvec[2+9] = (unsigned long)arg9; \
3780 _argvec[2+10] = (unsigned long)arg10; \
3781 _argvec[2+11] = (unsigned long)arg11; \
3782 _argvec[2+12] = (unsigned long)arg12; \
3783 __asm__ volatile( \
3784 VALGRIND_ALIGN_STACK \
3785 "mr 12,%1\n\t" \
3786 "std 2,-16(12)\n\t" /* save tocptr */ \
3787 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3788 "addi 1,1,-144\n\t" /* expand stack frame */ \
3789 /* arg12 */ \
3790 "ld 3,96(12)\n\t" \
3791 "std 3,120(1)\n\t" \
3792 /* arg11 */ \
3793 "ld 3,88(12)\n\t" \
3794 "std 3,112(1)\n\t" \
3795 /* arg10 */ \
3796 "ld 3,80(12)\n\t" \
3797 "std 3,104(1)\n\t" \
3798 /* arg9 */ \
3799 "ld 3,72(12)\n\t" \
3800 "std 3,96(1)\n\t" \
3801 /* args1-8 */ \
3802 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3803 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3804 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3805 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3806 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3807 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3808 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3809 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3810 "ld 12, 0(12)\n\t" /* target->r12 */ \
3811 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3812 "mr 12,%1\n\t" \
3813 "mr %0,3\n\t" \
3814 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3815 VALGRIND_RESTORE_STACK \
3816 : /*out*/ "=r" (_res) \
3817 : /*in*/ "r" (&_argvec[2]) \
3818 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3819 ); \
3820 lval = (__typeof__(lval)) _res; \
3821 } while (0)
3823 #endif /* PLAT_ppc64le_linux */
3825 /* ------------------------- arm-linux ------------------------- */
3827 #if defined(PLAT_arm_linux)
3829 /* These regs are trashed by the hidden call. */
/* Clobber list shared by all the arm CALL_FN_ macros below: the
   argument/scratch registers r0-r3, the temporary r4 used for the
   target address, r12 and the link register r14. */
3830 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3832 /* Macros to save and align the stack before making a function
3833 call and restore it afterwards as gcc may not keep the stack
3834 pointer aligned if it doesn't realise calls are being made
3835 to other functions. */
3837 /* This is a bit tricky. We store the original stack pointer in r10
3838 as it is callee-saves. gcc doesn't allow the use of r11 for some
3839 reason. Also, we can't directly "bic" the stack pointer in thumb
3840 mode since r13 isn't an allowed register number in that context.
3841 So use r4 as a temporary, since that is about to get trashed
3842 anyway, just after each use of this macro. Side effect is we need
3843 to be very careful about any future changes, since
3844 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* Save sp in r10, then round sp down to an 8-byte boundary via r4. */
3845 #define VALGRIND_ALIGN_STACK \
3846 "mov r10, sp\n\t" \
3847 "mov r4, sp\n\t" \
3848 "bic r4, r4, #7\n\t" \
3849 "mov sp, r4\n\t"
/* Undo VALGRIND_ALIGN_STACK (and any pushes since) by restoring the
   sp saved in r10. */
3850 #define VALGRIND_RESTORE_STACK \
3851 "mov sp, r10\n\t"
3853 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3854 long) == 4. */
/* arm-linux CALL_FN_ macros for 0..4 register-passed arguments.
   Common shape: _argvec[0] holds the target's nraddr, subsequent
   slots hold the arguments; the asm loads args into r0-r3, loads the
   target into r4, branches via the no-redirect sequence, and returns
   the result from r0.  "0" ties the input (&_argvec[0]) to the same
   register as the output. */
/* Call a no-argument original function; result in r0. */
3856 #define CALL_FN_W_v(lval, orig) \
3857 do { \
3858 volatile OrigFn _orig = (orig); \
3859 volatile unsigned long _argvec[1]; \
3860 volatile unsigned long _res; \
3861 _argvec[0] = (unsigned long)_orig.nraddr; \
3862 __asm__ volatile( \
3863 VALGRIND_ALIGN_STACK \
3864 "ldr r4, [%1] \n\t" /* target->r4 */ \
3865 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3866 VALGRIND_RESTORE_STACK \
3867 "mov %0, r0\n" \
3868 : /*out*/ "=r" (_res) \
3869 : /*in*/ "0" (&_argvec[0]) \
3870 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3871 ); \
3872 lval = (__typeof__(lval)) _res; \
3873 } while (0)
/* Call a 1-argument original function; arg1 -> r0. */
3875 #define CALL_FN_W_W(lval, orig, arg1) \
3876 do { \
3877 volatile OrigFn _orig = (orig); \
3878 volatile unsigned long _argvec[2]; \
3879 volatile unsigned long _res; \
3880 _argvec[0] = (unsigned long)_orig.nraddr; \
3881 _argvec[1] = (unsigned long)(arg1); \
3882 __asm__ volatile( \
3883 VALGRIND_ALIGN_STACK \
3884 "ldr r0, [%1, #4] \n\t" \
3885 "ldr r4, [%1] \n\t" /* target->r4 */ \
3886 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3887 VALGRIND_RESTORE_STACK \
3888 "mov %0, r0\n" \
3889 : /*out*/ "=r" (_res) \
3890 : /*in*/ "0" (&_argvec[0]) \
3891 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3892 ); \
3893 lval = (__typeof__(lval)) _res; \
3894 } while (0)
/* Call a 2-argument original function; args -> r0,r1. */
3896 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3897 do { \
3898 volatile OrigFn _orig = (orig); \
3899 volatile unsigned long _argvec[3]; \
3900 volatile unsigned long _res; \
3901 _argvec[0] = (unsigned long)_orig.nraddr; \
3902 _argvec[1] = (unsigned long)(arg1); \
3903 _argvec[2] = (unsigned long)(arg2); \
3904 __asm__ volatile( \
3905 VALGRIND_ALIGN_STACK \
3906 "ldr r0, [%1, #4] \n\t" \
3907 "ldr r1, [%1, #8] \n\t" \
3908 "ldr r4, [%1] \n\t" /* target->r4 */ \
3909 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3910 VALGRIND_RESTORE_STACK \
3911 "mov %0, r0\n" \
3912 : /*out*/ "=r" (_res) \
3913 : /*in*/ "0" (&_argvec[0]) \
3914 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3915 ); \
3916 lval = (__typeof__(lval)) _res; \
3917 } while (0)
/* Call a 3-argument original function; args -> r0..r2. */
3919 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3920 do { \
3921 volatile OrigFn _orig = (orig); \
3922 volatile unsigned long _argvec[4]; \
3923 volatile unsigned long _res; \
3924 _argvec[0] = (unsigned long)_orig.nraddr; \
3925 _argvec[1] = (unsigned long)(arg1); \
3926 _argvec[2] = (unsigned long)(arg2); \
3927 _argvec[3] = (unsigned long)(arg3); \
3928 __asm__ volatile( \
3929 VALGRIND_ALIGN_STACK \
3930 "ldr r0, [%1, #4] \n\t" \
3931 "ldr r1, [%1, #8] \n\t" \
3932 "ldr r2, [%1, #12] \n\t" \
3933 "ldr r4, [%1] \n\t" /* target->r4 */ \
3934 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3935 VALGRIND_RESTORE_STACK \
3936 "mov %0, r0\n" \
3937 : /*out*/ "=r" (_res) \
3938 : /*in*/ "0" (&_argvec[0]) \
3939 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3940 ); \
3941 lval = (__typeof__(lval)) _res; \
3942 } while (0)
/* Call a 4-argument original function; args -> r0..r3. */
3944 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3945 do { \
3946 volatile OrigFn _orig = (orig); \
3947 volatile unsigned long _argvec[5]; \
3948 volatile unsigned long _res; \
3949 _argvec[0] = (unsigned long)_orig.nraddr; \
3950 _argvec[1] = (unsigned long)(arg1); \
3951 _argvec[2] = (unsigned long)(arg2); \
3952 _argvec[3] = (unsigned long)(arg3); \
3953 _argvec[4] = (unsigned long)(arg4); \
3954 __asm__ volatile( \
3955 VALGRIND_ALIGN_STACK \
3956 "ldr r0, [%1, #4] \n\t" \
3957 "ldr r1, [%1, #8] \n\t" \
3958 "ldr r2, [%1, #12] \n\t" \
3959 "ldr r3, [%1, #16] \n\t" \
3960 "ldr r4, [%1] \n\t" /* target->r4 */ \
3961 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3962 VALGRIND_RESTORE_STACK \
3963 "mov %0, r0" \
3964 : /*out*/ "=r" (_res) \
3965 : /*in*/ "0" (&_argvec[0]) \
3966 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3967 ); \
3968 lval = (__typeof__(lval)) _res; \
3969 } while (0)
/* arm-linux CALL_FN_ macros for 5..8 arguments: the first four go in
   r0-r3, the rest are pushed on the (aligned) stack in reverse
   position order.  Where an odd number of words is pushed, an extra
   "sub sp, sp, #4" keeps the total stack adjustment a multiple of 8
   (presumably for AAPCS 8-byte alignment — confirm).
   VALGRIND_RESTORE_STACK discards the pushed words. */
/* Call a 5-argument original function; arg5 goes on the stack. */
3971 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3972 do { \
3973 volatile OrigFn _orig = (orig); \
3974 volatile unsigned long _argvec[6]; \
3975 volatile unsigned long _res; \
3976 _argvec[0] = (unsigned long)_orig.nraddr; \
3977 _argvec[1] = (unsigned long)(arg1); \
3978 _argvec[2] = (unsigned long)(arg2); \
3979 _argvec[3] = (unsigned long)(arg3); \
3980 _argvec[4] = (unsigned long)(arg4); \
3981 _argvec[5] = (unsigned long)(arg5); \
3982 __asm__ volatile( \
3983 VALGRIND_ALIGN_STACK \
3984 "sub sp, sp, #4 \n\t" \
3985 "ldr r0, [%1, #20] \n\t" \
3986 "push {r0} \n\t" \
3987 "ldr r0, [%1, #4] \n\t" \
3988 "ldr r1, [%1, #8] \n\t" \
3989 "ldr r2, [%1, #12] \n\t" \
3990 "ldr r3, [%1, #16] \n\t" \
3991 "ldr r4, [%1] \n\t" /* target->r4 */ \
3992 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3993 VALGRIND_RESTORE_STACK \
3994 "mov %0, r0" \
3995 : /*out*/ "=r" (_res) \
3996 : /*in*/ "0" (&_argvec[0]) \
3997 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3998 ); \
3999 lval = (__typeof__(lval)) _res; \
4000 } while (0)
/* Call a 6-argument original function; args 5-6 on the stack. */
4002 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4003 do { \
4004 volatile OrigFn _orig = (orig); \
4005 volatile unsigned long _argvec[7]; \
4006 volatile unsigned long _res; \
4007 _argvec[0] = (unsigned long)_orig.nraddr; \
4008 _argvec[1] = (unsigned long)(arg1); \
4009 _argvec[2] = (unsigned long)(arg2); \
4010 _argvec[3] = (unsigned long)(arg3); \
4011 _argvec[4] = (unsigned long)(arg4); \
4012 _argvec[5] = (unsigned long)(arg5); \
4013 _argvec[6] = (unsigned long)(arg6); \
4014 __asm__ volatile( \
4015 VALGRIND_ALIGN_STACK \
4016 "ldr r0, [%1, #20] \n\t" \
4017 "ldr r1, [%1, #24] \n\t" \
4018 "push {r0, r1} \n\t" \
4019 "ldr r0, [%1, #4] \n\t" \
4020 "ldr r1, [%1, #8] \n\t" \
4021 "ldr r2, [%1, #12] \n\t" \
4022 "ldr r3, [%1, #16] \n\t" \
4023 "ldr r4, [%1] \n\t" /* target->r4 */ \
4024 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4025 VALGRIND_RESTORE_STACK \
4026 "mov %0, r0" \
4027 : /*out*/ "=r" (_res) \
4028 : /*in*/ "0" (&_argvec[0]) \
4029 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4030 ); \
4031 lval = (__typeof__(lval)) _res; \
4032 } while (0)
/* Call a 7-argument original function; args 5-7 on the stack. */
4034 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4035 arg7) \
4036 do { \
4037 volatile OrigFn _orig = (orig); \
4038 volatile unsigned long _argvec[8]; \
4039 volatile unsigned long _res; \
4040 _argvec[0] = (unsigned long)_orig.nraddr; \
4041 _argvec[1] = (unsigned long)(arg1); \
4042 _argvec[2] = (unsigned long)(arg2); \
4043 _argvec[3] = (unsigned long)(arg3); \
4044 _argvec[4] = (unsigned long)(arg4); \
4045 _argvec[5] = (unsigned long)(arg5); \
4046 _argvec[6] = (unsigned long)(arg6); \
4047 _argvec[7] = (unsigned long)(arg7); \
4048 __asm__ volatile( \
4049 VALGRIND_ALIGN_STACK \
4050 "sub sp, sp, #4 \n\t" \
4051 "ldr r0, [%1, #20] \n\t" \
4052 "ldr r1, [%1, #24] \n\t" \
4053 "ldr r2, [%1, #28] \n\t" \
4054 "push {r0, r1, r2} \n\t" \
4055 "ldr r0, [%1, #4] \n\t" \
4056 "ldr r1, [%1, #8] \n\t" \
4057 "ldr r2, [%1, #12] \n\t" \
4058 "ldr r3, [%1, #16] \n\t" \
4059 "ldr r4, [%1] \n\t" /* target->r4 */ \
4060 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4061 VALGRIND_RESTORE_STACK \
4062 "mov %0, r0" \
4063 : /*out*/ "=r" (_res) \
4064 : /*in*/ "0" (&_argvec[0]) \
4065 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4066 ); \
4067 lval = (__typeof__(lval)) _res; \
4068 } while (0)
/* Call an 8-argument original function; args 5-8 on the stack. */
4070 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4071 arg7,arg8) \
4072 do { \
4073 volatile OrigFn _orig = (orig); \
4074 volatile unsigned long _argvec[9]; \
4075 volatile unsigned long _res; \
4076 _argvec[0] = (unsigned long)_orig.nraddr; \
4077 _argvec[1] = (unsigned long)(arg1); \
4078 _argvec[2] = (unsigned long)(arg2); \
4079 _argvec[3] = (unsigned long)(arg3); \
4080 _argvec[4] = (unsigned long)(arg4); \
4081 _argvec[5] = (unsigned long)(arg5); \
4082 _argvec[6] = (unsigned long)(arg6); \
4083 _argvec[7] = (unsigned long)(arg7); \
4084 _argvec[8] = (unsigned long)(arg8); \
4085 __asm__ volatile( \
4086 VALGRIND_ALIGN_STACK \
4087 "ldr r0, [%1, #20] \n\t" \
4088 "ldr r1, [%1, #24] \n\t" \
4089 "ldr r2, [%1, #28] \n\t" \
4090 "ldr r3, [%1, #32] \n\t" \
4091 "push {r0, r1, r2, r3} \n\t" \
4092 "ldr r0, [%1, #4] \n\t" \
4093 "ldr r1, [%1, #8] \n\t" \
4094 "ldr r2, [%1, #12] \n\t" \
4095 "ldr r3, [%1, #16] \n\t" \
4096 "ldr r4, [%1] \n\t" /* target->r4 */ \
4097 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4098 VALGRIND_RESTORE_STACK \
4099 "mov %0, r0" \
4100 : /*out*/ "=r" (_res) \
4101 : /*in*/ "0" (&_argvec[0]) \
4102 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4103 ); \
4104 lval = (__typeof__(lval)) _res; \
4105 } while (0)
/* arm-linux CALL_FN_ macros for 9..12 arguments: r0-r3 carry args
   1-4; args 5 upwards are pushed on the stack (in batches, highest
   position pushed first so they land in ascending memory order), with
   the occasional "sub sp, sp, #4" padding to keep the total
   adjustment a multiple of 8. */
/* Call a 9-argument original function; args 5-9 on the stack. */
4107 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4108 arg7,arg8,arg9) \
4109 do { \
4110 volatile OrigFn _orig = (orig); \
4111 volatile unsigned long _argvec[10]; \
4112 volatile unsigned long _res; \
4113 _argvec[0] = (unsigned long)_orig.nraddr; \
4114 _argvec[1] = (unsigned long)(arg1); \
4115 _argvec[2] = (unsigned long)(arg2); \
4116 _argvec[3] = (unsigned long)(arg3); \
4117 _argvec[4] = (unsigned long)(arg4); \
4118 _argvec[5] = (unsigned long)(arg5); \
4119 _argvec[6] = (unsigned long)(arg6); \
4120 _argvec[7] = (unsigned long)(arg7); \
4121 _argvec[8] = (unsigned long)(arg8); \
4122 _argvec[9] = (unsigned long)(arg9); \
4123 __asm__ volatile( \
4124 VALGRIND_ALIGN_STACK \
4125 "sub sp, sp, #4 \n\t" \
4126 "ldr r0, [%1, #20] \n\t" \
4127 "ldr r1, [%1, #24] \n\t" \
4128 "ldr r2, [%1, #28] \n\t" \
4129 "ldr r3, [%1, #32] \n\t" \
4130 "ldr r4, [%1, #36] \n\t" \
4131 "push {r0, r1, r2, r3, r4} \n\t" \
4132 "ldr r0, [%1, #4] \n\t" \
4133 "ldr r1, [%1, #8] \n\t" \
4134 "ldr r2, [%1, #12] \n\t" \
4135 "ldr r3, [%1, #16] \n\t" \
4136 "ldr r4, [%1] \n\t" /* target->r4 */ \
4137 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4138 VALGRIND_RESTORE_STACK \
4139 "mov %0, r0" \
4140 : /*out*/ "=r" (_res) \
4141 : /*in*/ "0" (&_argvec[0]) \
4142 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4143 ); \
4144 lval = (__typeof__(lval)) _res; \
4145 } while (0)
/* Call a 10-argument original function; args 5-10 on the stack
   (arg10 pushed first). */
4147 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4148 arg7,arg8,arg9,arg10) \
4149 do { \
4150 volatile OrigFn _orig = (orig); \
4151 volatile unsigned long _argvec[11]; \
4152 volatile unsigned long _res; \
4153 _argvec[0] = (unsigned long)_orig.nraddr; \
4154 _argvec[1] = (unsigned long)(arg1); \
4155 _argvec[2] = (unsigned long)(arg2); \
4156 _argvec[3] = (unsigned long)(arg3); \
4157 _argvec[4] = (unsigned long)(arg4); \
4158 _argvec[5] = (unsigned long)(arg5); \
4159 _argvec[6] = (unsigned long)(arg6); \
4160 _argvec[7] = (unsigned long)(arg7); \
4161 _argvec[8] = (unsigned long)(arg8); \
4162 _argvec[9] = (unsigned long)(arg9); \
4163 _argvec[10] = (unsigned long)(arg10); \
4164 __asm__ volatile( \
4165 VALGRIND_ALIGN_STACK \
4166 "ldr r0, [%1, #40] \n\t" \
4167 "push {r0} \n\t" \
4168 "ldr r0, [%1, #20] \n\t" \
4169 "ldr r1, [%1, #24] \n\t" \
4170 "ldr r2, [%1, #28] \n\t" \
4171 "ldr r3, [%1, #32] \n\t" \
4172 "ldr r4, [%1, #36] \n\t" \
4173 "push {r0, r1, r2, r3, r4} \n\t" \
4174 "ldr r0, [%1, #4] \n\t" \
4175 "ldr r1, [%1, #8] \n\t" \
4176 "ldr r2, [%1, #12] \n\t" \
4177 "ldr r3, [%1, #16] \n\t" \
4178 "ldr r4, [%1] \n\t" /* target->r4 */ \
4179 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4180 VALGRIND_RESTORE_STACK \
4181 "mov %0, r0" \
4182 : /*out*/ "=r" (_res) \
4183 : /*in*/ "0" (&_argvec[0]) \
4184 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4185 ); \
4186 lval = (__typeof__(lval)) _res; \
4187 } while (0)
/* Call an 11-argument original function; args 5-11 on the stack. */
4189 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4190 arg6,arg7,arg8,arg9,arg10, \
4191 arg11) \
4192 do { \
4193 volatile OrigFn _orig = (orig); \
4194 volatile unsigned long _argvec[12]; \
4195 volatile unsigned long _res; \
4196 _argvec[0] = (unsigned long)_orig.nraddr; \
4197 _argvec[1] = (unsigned long)(arg1); \
4198 _argvec[2] = (unsigned long)(arg2); \
4199 _argvec[3] = (unsigned long)(arg3); \
4200 _argvec[4] = (unsigned long)(arg4); \
4201 _argvec[5] = (unsigned long)(arg5); \
4202 _argvec[6] = (unsigned long)(arg6); \
4203 _argvec[7] = (unsigned long)(arg7); \
4204 _argvec[8] = (unsigned long)(arg8); \
4205 _argvec[9] = (unsigned long)(arg9); \
4206 _argvec[10] = (unsigned long)(arg10); \
4207 _argvec[11] = (unsigned long)(arg11); \
4208 __asm__ volatile( \
4209 VALGRIND_ALIGN_STACK \
4210 "sub sp, sp, #4 \n\t" \
4211 "ldr r0, [%1, #40] \n\t" \
4212 "ldr r1, [%1, #44] \n\t" \
4213 "push {r0, r1} \n\t" \
4214 "ldr r0, [%1, #20] \n\t" \
4215 "ldr r1, [%1, #24] \n\t" \
4216 "ldr r2, [%1, #28] \n\t" \
4217 "ldr r3, [%1, #32] \n\t" \
4218 "ldr r4, [%1, #36] \n\t" \
4219 "push {r0, r1, r2, r3, r4} \n\t" \
4220 "ldr r0, [%1, #4] \n\t" \
4221 "ldr r1, [%1, #8] \n\t" \
4222 "ldr r2, [%1, #12] \n\t" \
4223 "ldr r3, [%1, #16] \n\t" \
4224 "ldr r4, [%1] \n\t" /* target->r4 */ \
4225 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4226 VALGRIND_RESTORE_STACK \
4227 "mov %0, r0" \
4228 : /*out*/ "=r" (_res) \
4229 : /*in*/ "0" (&_argvec[0]) \
4230 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4231 ); \
4232 lval = (__typeof__(lval)) _res; \
4233 } while (0)
/* Call a 12-argument original function; args 5-12 on the stack. */
4235 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4236 arg6,arg7,arg8,arg9,arg10, \
4237 arg11,arg12) \
4238 do { \
4239 volatile OrigFn _orig = (orig); \
4240 volatile unsigned long _argvec[13]; \
4241 volatile unsigned long _res; \
4242 _argvec[0] = (unsigned long)_orig.nraddr; \
4243 _argvec[1] = (unsigned long)(arg1); \
4244 _argvec[2] = (unsigned long)(arg2); \
4245 _argvec[3] = (unsigned long)(arg3); \
4246 _argvec[4] = (unsigned long)(arg4); \
4247 _argvec[5] = (unsigned long)(arg5); \
4248 _argvec[6] = (unsigned long)(arg6); \
4249 _argvec[7] = (unsigned long)(arg7); \
4250 _argvec[8] = (unsigned long)(arg8); \
4251 _argvec[9] = (unsigned long)(arg9); \
4252 _argvec[10] = (unsigned long)(arg10); \
4253 _argvec[11] = (unsigned long)(arg11); \
4254 _argvec[12] = (unsigned long)(arg12); \
4255 __asm__ volatile( \
4256 VALGRIND_ALIGN_STACK \
4257 "ldr r0, [%1, #40] \n\t" \
4258 "ldr r1, [%1, #44] \n\t" \
4259 "ldr r2, [%1, #48] \n\t" \
4260 "push {r0, r1, r2} \n\t" \
4261 "ldr r0, [%1, #20] \n\t" \
4262 "ldr r1, [%1, #24] \n\t" \
4263 "ldr r2, [%1, #28] \n\t" \
4264 "ldr r3, [%1, #32] \n\t" \
4265 "ldr r4, [%1, #36] \n\t" \
4266 "push {r0, r1, r2, r3, r4} \n\t" \
4267 "ldr r0, [%1, #4] \n\t" \
4268 "ldr r1, [%1, #8] \n\t" \
4269 "ldr r2, [%1, #12] \n\t" \
4270 "ldr r3, [%1, #16] \n\t" \
4271 "ldr r4, [%1] \n\t" /* target->r4 */ \
4272 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4273 VALGRIND_RESTORE_STACK \
4274 "mov %0, r0" \
4275 : /*out*/ "=r" (_res) \
4276 : /*in*/ "0" (&_argvec[0]) \
4277 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4278 ); \
4279 lval = (__typeof__(lval)) _res; \
4280 } while (0)
4282 #endif /* PLAT_arm_linux */
4284 /* ------------------------ arm64-linux ------------------------ */
4286 #if defined(PLAT_arm64_linux)
4288 /* These regs are trashed by the hidden call. */
/* Clobber list shared by the arm64 CALL_FN_ macros: integer registers
   x0-x20 plus the link register x30, and all vector registers
   v0-v31. */
4289 #define __CALLER_SAVED_REGS \
4290 "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
4291 "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
4292 "x18", "x19", "x20", "x30", \
4293 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
4294 "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
4295 "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
4296 "v26", "v27", "v28", "v29", "v30", "v31"
4298 /* x21 is callee-saved, so we can use it to save and restore SP around
4299 the hidden call. */
/* Save sp in x21, then round sp down to a 16-byte boundary. */
4300 #define VALGRIND_ALIGN_STACK \
4301 "mov x21, sp\n\t" \
4302 "bic sp, x21, #15\n\t"
/* Undo VALGRIND_ALIGN_STACK (and any frame expansion since) by
   restoring the sp saved in x21. */
4303 #define VALGRIND_RESTORE_STACK \
4304 "mov sp, x21\n\t"
4306 /* These CALL_FN_ macros assume that on arm64-linux,
4307 sizeof(unsigned long) == 8. */
/* arm64-linux CALL_FN_ macros for 0..4 register-passed arguments.
   Common shape: _argvec[0] holds the target's nraddr, subsequent
   slots the arguments; the asm loads args into x0-x3, loads the
   target into x8, branches via the no-redirect sequence, and takes
   the result from x0.  "0" ties the input (&_argvec[0]) to the same
   register as the output. */
/* Call a no-argument original function; result in x0. */
4309 #define CALL_FN_W_v(lval, orig) \
4310 do { \
4311 volatile OrigFn _orig = (orig); \
4312 volatile unsigned long _argvec[1]; \
4313 volatile unsigned long _res; \
4314 _argvec[0] = (unsigned long)_orig.nraddr; \
4315 __asm__ volatile( \
4316 VALGRIND_ALIGN_STACK \
4317 "ldr x8, [%1] \n\t" /* target->x8 */ \
4318 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4319 VALGRIND_RESTORE_STACK \
4320 "mov %0, x0\n" \
4321 : /*out*/ "=r" (_res) \
4322 : /*in*/ "0" (&_argvec[0]) \
4323 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4324 ); \
4325 lval = (__typeof__(lval)) _res; \
4326 } while (0)
/* Call a 1-argument original function; arg1 -> x0. */
4328 #define CALL_FN_W_W(lval, orig, arg1) \
4329 do { \
4330 volatile OrigFn _orig = (orig); \
4331 volatile unsigned long _argvec[2]; \
4332 volatile unsigned long _res; \
4333 _argvec[0] = (unsigned long)_orig.nraddr; \
4334 _argvec[1] = (unsigned long)(arg1); \
4335 __asm__ volatile( \
4336 VALGRIND_ALIGN_STACK \
4337 "ldr x0, [%1, #8] \n\t" \
4338 "ldr x8, [%1] \n\t" /* target->x8 */ \
4339 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4340 VALGRIND_RESTORE_STACK \
4341 "mov %0, x0\n" \
4342 : /*out*/ "=r" (_res) \
4343 : /*in*/ "0" (&_argvec[0]) \
4344 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4345 ); \
4346 lval = (__typeof__(lval)) _res; \
4347 } while (0)
/* Call a 2-argument original function; args -> x0,x1. */
4349 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4350 do { \
4351 volatile OrigFn _orig = (orig); \
4352 volatile unsigned long _argvec[3]; \
4353 volatile unsigned long _res; \
4354 _argvec[0] = (unsigned long)_orig.nraddr; \
4355 _argvec[1] = (unsigned long)(arg1); \
4356 _argvec[2] = (unsigned long)(arg2); \
4357 __asm__ volatile( \
4358 VALGRIND_ALIGN_STACK \
4359 "ldr x0, [%1, #8] \n\t" \
4360 "ldr x1, [%1, #16] \n\t" \
4361 "ldr x8, [%1] \n\t" /* target->x8 */ \
4362 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4363 VALGRIND_RESTORE_STACK \
4364 "mov %0, x0\n" \
4365 : /*out*/ "=r" (_res) \
4366 : /*in*/ "0" (&_argvec[0]) \
4367 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4368 ); \
4369 lval = (__typeof__(lval)) _res; \
4370 } while (0)
/* Call a 3-argument original function; args -> x0..x2. */
4372 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4373 do { \
4374 volatile OrigFn _orig = (orig); \
4375 volatile unsigned long _argvec[4]; \
4376 volatile unsigned long _res; \
4377 _argvec[0] = (unsigned long)_orig.nraddr; \
4378 _argvec[1] = (unsigned long)(arg1); \
4379 _argvec[2] = (unsigned long)(arg2); \
4380 _argvec[3] = (unsigned long)(arg3); \
4381 __asm__ volatile( \
4382 VALGRIND_ALIGN_STACK \
4383 "ldr x0, [%1, #8] \n\t" \
4384 "ldr x1, [%1, #16] \n\t" \
4385 "ldr x2, [%1, #24] \n\t" \
4386 "ldr x8, [%1] \n\t" /* target->x8 */ \
4387 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4388 VALGRIND_RESTORE_STACK \
4389 "mov %0, x0\n" \
4390 : /*out*/ "=r" (_res) \
4391 : /*in*/ "0" (&_argvec[0]) \
4392 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4393 ); \
4394 lval = (__typeof__(lval)) _res; \
4395 } while (0)
/* Call a 4-argument original function; args -> x0..x3. */
4397 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4398 do { \
4399 volatile OrigFn _orig = (orig); \
4400 volatile unsigned long _argvec[5]; \
4401 volatile unsigned long _res; \
4402 _argvec[0] = (unsigned long)_orig.nraddr; \
4403 _argvec[1] = (unsigned long)(arg1); \
4404 _argvec[2] = (unsigned long)(arg2); \
4405 _argvec[3] = (unsigned long)(arg3); \
4406 _argvec[4] = (unsigned long)(arg4); \
4407 __asm__ volatile( \
4408 VALGRIND_ALIGN_STACK \
4409 "ldr x0, [%1, #8] \n\t" \
4410 "ldr x1, [%1, #16] \n\t" \
4411 "ldr x2, [%1, #24] \n\t" \
4412 "ldr x3, [%1, #32] \n\t" \
4413 "ldr x8, [%1] \n\t" /* target->x8 */ \
4414 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4415 VALGRIND_RESTORE_STACK \
4416 "mov %0, x0" \
4417 : /*out*/ "=r" (_res) \
4418 : /*in*/ "0" (&_argvec[0]) \
4419 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4420 ); \
4421 lval = (__typeof__(lval)) _res; \
4422 } while (0)
/* arm64-linux CALL_FN_ macros for 5..8 arguments — all still fit in
   the x0-x7 argument registers, so no stack spill is needed. */
/* Call a 5-argument original function; args -> x0..x4. */
4424 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4425 do { \
4426 volatile OrigFn _orig = (orig); \
4427 volatile unsigned long _argvec[6]; \
4428 volatile unsigned long _res; \
4429 _argvec[0] = (unsigned long)_orig.nraddr; \
4430 _argvec[1] = (unsigned long)(arg1); \
4431 _argvec[2] = (unsigned long)(arg2); \
4432 _argvec[3] = (unsigned long)(arg3); \
4433 _argvec[4] = (unsigned long)(arg4); \
4434 _argvec[5] = (unsigned long)(arg5); \
4435 __asm__ volatile( \
4436 VALGRIND_ALIGN_STACK \
4437 "ldr x0, [%1, #8] \n\t" \
4438 "ldr x1, [%1, #16] \n\t" \
4439 "ldr x2, [%1, #24] \n\t" \
4440 "ldr x3, [%1, #32] \n\t" \
4441 "ldr x4, [%1, #40] \n\t" \
4442 "ldr x8, [%1] \n\t" /* target->x8 */ \
4443 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4444 VALGRIND_RESTORE_STACK \
4445 "mov %0, x0" \
4446 : /*out*/ "=r" (_res) \
4447 : /*in*/ "0" (&_argvec[0]) \
4448 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4449 ); \
4450 lval = (__typeof__(lval)) _res; \
4451 } while (0)
/* Call a 6-argument original function; args -> x0..x5. */
4453 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4454 do { \
4455 volatile OrigFn _orig = (orig); \
4456 volatile unsigned long _argvec[7]; \
4457 volatile unsigned long _res; \
4458 _argvec[0] = (unsigned long)_orig.nraddr; \
4459 _argvec[1] = (unsigned long)(arg1); \
4460 _argvec[2] = (unsigned long)(arg2); \
4461 _argvec[3] = (unsigned long)(arg3); \
4462 _argvec[4] = (unsigned long)(arg4); \
4463 _argvec[5] = (unsigned long)(arg5); \
4464 _argvec[6] = (unsigned long)(arg6); \
4465 __asm__ volatile( \
4466 VALGRIND_ALIGN_STACK \
4467 "ldr x0, [%1, #8] \n\t" \
4468 "ldr x1, [%1, #16] \n\t" \
4469 "ldr x2, [%1, #24] \n\t" \
4470 "ldr x3, [%1, #32] \n\t" \
4471 "ldr x4, [%1, #40] \n\t" \
4472 "ldr x5, [%1, #48] \n\t" \
4473 "ldr x8, [%1] \n\t" /* target->x8 */ \
4474 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4475 VALGRIND_RESTORE_STACK \
4476 "mov %0, x0" \
4477 : /*out*/ "=r" (_res) \
4478 : /*in*/ "0" (&_argvec[0]) \
4479 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4480 ); \
4481 lval = (__typeof__(lval)) _res; \
4482 } while (0)
/* Call a 7-argument original function; args -> x0..x6. */
4484 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4485 arg7) \
4486 do { \
4487 volatile OrigFn _orig = (orig); \
4488 volatile unsigned long _argvec[8]; \
4489 volatile unsigned long _res; \
4490 _argvec[0] = (unsigned long)_orig.nraddr; \
4491 _argvec[1] = (unsigned long)(arg1); \
4492 _argvec[2] = (unsigned long)(arg2); \
4493 _argvec[3] = (unsigned long)(arg3); \
4494 _argvec[4] = (unsigned long)(arg4); \
4495 _argvec[5] = (unsigned long)(arg5); \
4496 _argvec[6] = (unsigned long)(arg6); \
4497 _argvec[7] = (unsigned long)(arg7); \
4498 __asm__ volatile( \
4499 VALGRIND_ALIGN_STACK \
4500 "ldr x0, [%1, #8] \n\t" \
4501 "ldr x1, [%1, #16] \n\t" \
4502 "ldr x2, [%1, #24] \n\t" \
4503 "ldr x3, [%1, #32] \n\t" \
4504 "ldr x4, [%1, #40] \n\t" \
4505 "ldr x5, [%1, #48] \n\t" \
4506 "ldr x6, [%1, #56] \n\t" \
4507 "ldr x8, [%1] \n\t" /* target->x8 */ \
4508 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4509 VALGRIND_RESTORE_STACK \
4510 "mov %0, x0" \
4511 : /*out*/ "=r" (_res) \
4512 : /*in*/ "0" (&_argvec[0]) \
4513 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4514 ); \
4515 lval = (__typeof__(lval)) _res; \
4516 } while (0)
/* Call an 8-argument original function; args -> x0..x7. */
4518 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4519 arg7,arg8) \
4520 do { \
4521 volatile OrigFn _orig = (orig); \
4522 volatile unsigned long _argvec[9]; \
4523 volatile unsigned long _res; \
4524 _argvec[0] = (unsigned long)_orig.nraddr; \
4525 _argvec[1] = (unsigned long)(arg1); \
4526 _argvec[2] = (unsigned long)(arg2); \
4527 _argvec[3] = (unsigned long)(arg3); \
4528 _argvec[4] = (unsigned long)(arg4); \
4529 _argvec[5] = (unsigned long)(arg5); \
4530 _argvec[6] = (unsigned long)(arg6); \
4531 _argvec[7] = (unsigned long)(arg7); \
4532 _argvec[8] = (unsigned long)(arg8); \
4533 __asm__ volatile( \
4534 VALGRIND_ALIGN_STACK \
4535 "ldr x0, [%1, #8] \n\t" \
4536 "ldr x1, [%1, #16] \n\t" \
4537 "ldr x2, [%1, #24] \n\t" \
4538 "ldr x3, [%1, #32] \n\t" \
4539 "ldr x4, [%1, #40] \n\t" \
4540 "ldr x5, [%1, #48] \n\t" \
4541 "ldr x6, [%1, #56] \n\t" \
4542 "ldr x7, [%1, #64] \n\t" \
4543 "ldr x8, [%1] \n\t" /* target->x8 */ \
4544 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4545 VALGRIND_RESTORE_STACK \
4546 "mov %0, x0" \
4547 : /*out*/ "=r" (_res) \
4548 : /*in*/ "0" (&_argvec[0]) \
4549 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4550 ); \
4551 lval = (__typeof__(lval)) _res; \
4552 } while (0)
/* arm64-linux CALL_FN_ macros for 9..10 arguments: args 1-8 go in
   x0-x7; args beyond the eighth are stored into a 16-byte-aligned
   scratch area reserved below sp ("sub sp, sp, #0x20"), using x8 as a
   temporary before it finally receives the target address. */
/* Call a 9-argument original function; arg9 stored at [sp, #0]. */
4554 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4555 arg7,arg8,arg9) \
4556 do { \
4557 volatile OrigFn _orig = (orig); \
4558 volatile unsigned long _argvec[10]; \
4559 volatile unsigned long _res; \
4560 _argvec[0] = (unsigned long)_orig.nraddr; \
4561 _argvec[1] = (unsigned long)(arg1); \
4562 _argvec[2] = (unsigned long)(arg2); \
4563 _argvec[3] = (unsigned long)(arg3); \
4564 _argvec[4] = (unsigned long)(arg4); \
4565 _argvec[5] = (unsigned long)(arg5); \
4566 _argvec[6] = (unsigned long)(arg6); \
4567 _argvec[7] = (unsigned long)(arg7); \
4568 _argvec[8] = (unsigned long)(arg8); \
4569 _argvec[9] = (unsigned long)(arg9); \
4570 __asm__ volatile( \
4571 VALGRIND_ALIGN_STACK \
4572 "sub sp, sp, #0x20 \n\t" \
4573 "ldr x0, [%1, #8] \n\t" \
4574 "ldr x1, [%1, #16] \n\t" \
4575 "ldr x2, [%1, #24] \n\t" \
4576 "ldr x3, [%1, #32] \n\t" \
4577 "ldr x4, [%1, #40] \n\t" \
4578 "ldr x5, [%1, #48] \n\t" \
4579 "ldr x6, [%1, #56] \n\t" \
4580 "ldr x7, [%1, #64] \n\t" \
4581 "ldr x8, [%1, #72] \n\t" \
4582 "str x8, [sp, #0] \n\t" \
4583 "ldr x8, [%1] \n\t" /* target->x8 */ \
4584 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4585 VALGRIND_RESTORE_STACK \
4586 "mov %0, x0" \
4587 : /*out*/ "=r" (_res) \
4588 : /*in*/ "0" (&_argvec[0]) \
4589 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4590 ); \
4591 lval = (__typeof__(lval)) _res; \
4592 } while (0)
/* Call a 10-argument original function; args 9-10 stored at
   [sp, #0] and [sp, #8]. */
4594 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4595 arg7,arg8,arg9,arg10) \
4596 do { \
4597 volatile OrigFn _orig = (orig); \
4598 volatile unsigned long _argvec[11]; \
4599 volatile unsigned long _res; \
4600 _argvec[0] = (unsigned long)_orig.nraddr; \
4601 _argvec[1] = (unsigned long)(arg1); \
4602 _argvec[2] = (unsigned long)(arg2); \
4603 _argvec[3] = (unsigned long)(arg3); \
4604 _argvec[4] = (unsigned long)(arg4); \
4605 _argvec[5] = (unsigned long)(arg5); \
4606 _argvec[6] = (unsigned long)(arg6); \
4607 _argvec[7] = (unsigned long)(arg7); \
4608 _argvec[8] = (unsigned long)(arg8); \
4609 _argvec[9] = (unsigned long)(arg9); \
4610 _argvec[10] = (unsigned long)(arg10); \
4611 __asm__ volatile( \
4612 VALGRIND_ALIGN_STACK \
4613 "sub sp, sp, #0x20 \n\t" \
4614 "ldr x0, [%1, #8] \n\t" \
4615 "ldr x1, [%1, #16] \n\t" \
4616 "ldr x2, [%1, #24] \n\t" \
4617 "ldr x3, [%1, #32] \n\t" \
4618 "ldr x4, [%1, #40] \n\t" \
4619 "ldr x5, [%1, #48] \n\t" \
4620 "ldr x6, [%1, #56] \n\t" \
4621 "ldr x7, [%1, #64] \n\t" \
4622 "ldr x8, [%1, #72] \n\t" \
4623 "str x8, [sp, #0] \n\t" \
4624 "ldr x8, [%1, #80] \n\t" \
4625 "str x8, [sp, #8] \n\t" \
4626 "ldr x8, [%1] \n\t" /* target->x8 */ \
4627 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4628 VALGRIND_RESTORE_STACK \
4629 "mov %0, x0" \
4630 : /*out*/ "=r" (_res) \
4631 : /*in*/ "0" (&_argvec[0]) \
4632 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4633 ); \
4634 lval = (__typeof__(lval)) _res; \
4635 } while (0)
/* arm64: call the original function with 11 word-sized args. Args 1-8 in
   x0-x7; args 9-11 in three stack slots (sp dropped by 0x30 to preserve
   16-byte alignment); target address in x8. */
4637 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4638 arg7,arg8,arg9,arg10,arg11) \
4639 do { \
4640 volatile OrigFn _orig = (orig); \
4641 volatile unsigned long _argvec[12]; \
4642 volatile unsigned long _res; \
4643 _argvec[0] = (unsigned long)_orig.nraddr; \
4644 _argvec[1] = (unsigned long)(arg1); \
4645 _argvec[2] = (unsigned long)(arg2); \
4646 _argvec[3] = (unsigned long)(arg3); \
4647 _argvec[4] = (unsigned long)(arg4); \
4648 _argvec[5] = (unsigned long)(arg5); \
4649 _argvec[6] = (unsigned long)(arg6); \
4650 _argvec[7] = (unsigned long)(arg7); \
4651 _argvec[8] = (unsigned long)(arg8); \
4652 _argvec[9] = (unsigned long)(arg9); \
4653 _argvec[10] = (unsigned long)(arg10); \
4654 _argvec[11] = (unsigned long)(arg11); \
4655 __asm__ volatile( \
4656 VALGRIND_ALIGN_STACK \
4657 "sub sp, sp, #0x30 \n\t" \
4658 "ldr x0, [%1, #8] \n\t" \
4659 "ldr x1, [%1, #16] \n\t" \
4660 "ldr x2, [%1, #24] \n\t" \
4661 "ldr x3, [%1, #32] \n\t" \
4662 "ldr x4, [%1, #40] \n\t" \
4663 "ldr x5, [%1, #48] \n\t" \
4664 "ldr x6, [%1, #56] \n\t" \
4665 "ldr x7, [%1, #64] \n\t" \
4666 "ldr x8, [%1, #72] \n\t" \
4667 "str x8, [sp, #0] \n\t" \
4668 "ldr x8, [%1, #80] \n\t" \
4669 "str x8, [sp, #8] \n\t" \
4670 "ldr x8, [%1, #88] \n\t" \
4671 "str x8, [sp, #16] \n\t" \
4672 "ldr x8, [%1] \n\t" /* target->x8 */ \
4673 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4674 VALGRIND_RESTORE_STACK \
4675 "mov %0, x0" \
4676 : /*out*/ "=r" (_res) \
4677 : /*in*/ "0" (&_argvec[0]) \
4678 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4679 ); \
4680 lval = (__typeof__(lval)) _res; \
4681 } while (0)
/* arm64: call the original function with 12 word-sized args. Args 1-8 in
   x0-x7; args 9-12 in four stack slots (sp dropped by 0x30, which exactly
   covers 4 x 8 bytes plus alignment padding); target address in x8. */
4683 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4684 arg7,arg8,arg9,arg10,arg11, \
4685 arg12) \
4686 do { \
4687 volatile OrigFn _orig = (orig); \
4688 volatile unsigned long _argvec[13]; \
4689 volatile unsigned long _res; \
4690 _argvec[0] = (unsigned long)_orig.nraddr; \
4691 _argvec[1] = (unsigned long)(arg1); \
4692 _argvec[2] = (unsigned long)(arg2); \
4693 _argvec[3] = (unsigned long)(arg3); \
4694 _argvec[4] = (unsigned long)(arg4); \
4695 _argvec[5] = (unsigned long)(arg5); \
4696 _argvec[6] = (unsigned long)(arg6); \
4697 _argvec[7] = (unsigned long)(arg7); \
4698 _argvec[8] = (unsigned long)(arg8); \
4699 _argvec[9] = (unsigned long)(arg9); \
4700 _argvec[10] = (unsigned long)(arg10); \
4701 _argvec[11] = (unsigned long)(arg11); \
4702 _argvec[12] = (unsigned long)(arg12); \
4703 __asm__ volatile( \
4704 VALGRIND_ALIGN_STACK \
4705 "sub sp, sp, #0x30 \n\t" \
4706 "ldr x0, [%1, #8] \n\t" \
4707 "ldr x1, [%1, #16] \n\t" \
4708 "ldr x2, [%1, #24] \n\t" \
4709 "ldr x3, [%1, #32] \n\t" \
4710 "ldr x4, [%1, #40] \n\t" \
4711 "ldr x5, [%1, #48] \n\t" \
4712 "ldr x6, [%1, #56] \n\t" \
4713 "ldr x7, [%1, #64] \n\t" \
4714 "ldr x8, [%1, #72] \n\t" \
4715 "str x8, [sp, #0] \n\t" \
4716 "ldr x8, [%1, #80] \n\t" \
4717 "str x8, [sp, #8] \n\t" \
4718 "ldr x8, [%1, #88] \n\t" \
4719 "str x8, [sp, #16] \n\t" \
4720 "ldr x8, [%1, #96] \n\t" \
4721 "str x8, [sp, #24] \n\t" \
4722 "ldr x8, [%1] \n\t" /* target->x8 */ \
4723 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4724 VALGRIND_RESTORE_STACK \
4725 "mov %0, x0" \
4726 : /*out*/ "=r" (_res) \
4727 : /*in*/ "0" (&_argvec[0]) \
4728 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4729 ); \
4730 lval = (__typeof__(lval)) _res; \
4731 } while (0)
4733 #endif /* PLAT_arm64_linux */
4735 /* ------------------------- s390x-linux ------------------------- */
4737 #if defined(PLAT_s390x_linux)
4739 /* Similar workaround as amd64 (see above), but we use r11 as frame
4740 pointer and save the old r11 in r7. r11 might be used for
4741 argvec, therefore we copy argvec in r1 since r1 is clobbered
4742 after the call anyway. */
/* When CFI asm is available, the prologue copies the argvec pointer into
   r1, saves the old frame pointer r11 in r7 and installs a fresh CFA so
   unwinding through the hidden call works; the epilogue undoes this.
   Without CFI support only the r1 copy is emitted.  Every CALL_FN_*
   below therefore lists "7" as clobbered. */
4743 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
4744 # define __FRAME_POINTER \
4745 ,"d"(__builtin_dwarf_cfa())
4746 # define VALGRIND_CFI_PROLOGUE \
4747 ".cfi_remember_state\n\t" \
4748 "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
4749 "lgr 7,11\n\t" \
4750 "lgr 11,%2\n\t" \
4751 ".cfi_def_cfa 11, 0\n\t"
4752 # define VALGRIND_CFI_EPILOGUE \
4753 "lgr 11, 7\n\t" \
4754 ".cfi_restore_state\n\t"
4755 #else
4756 # define __FRAME_POINTER
4757 # define VALGRIND_CFI_PROLOGUE \
4758 "lgr 1,%1\n\t"
4759 # define VALGRIND_CFI_EPILOGUE
4760 #endif
4762 /* Nb: On s390 the stack pointer is properly aligned *at all times*
4763 according to the s390 GCC maintainer. (The ABI specification is not
4764 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4765 VALGRIND_RESTORE_STACK are not defined here. */
4767 /* These regs are trashed by the hidden call. Note that we overwrite
4768 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4769 function a proper return address. All others are ABI defined call
4770 clobbers. */
/* If the vector extension is in use, all 32 vector registers are call
   clobbered as well; otherwise only the f0-f7 FP registers are. */
4771 #if defined(__VX__) || defined(__S390_VX__)
4772 #define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
4773 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", \
4774 "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", \
4775 "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", \
4776 "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
4777 #else
4778 #define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
4779 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
4780 #endif
4782 /* Nb: Although r11 is modified in the asm snippets below (inside
4783 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4784 two reasons:
4785 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4786 modified
4787 (2) GCC will complain that r11 cannot appear inside a clobber section,
4788 when compiled with -O -fno-omit-frame-pointer */
/* s390x: call the original function with no arguments.  A 160-byte
   register save area is allocated below the stack pointer (r15) as the
   s390x ABI requires; the target address is loaded into r1 and the
   result comes back in r2. */
4791 #define CALL_FN_W_v(lval, orig) \
4792 do { \
4793 volatile OrigFn _orig = (orig); \
4794 volatile unsigned long _argvec[1]; \
4795 volatile unsigned long _res; \
4796 _argvec[0] = (unsigned long)_orig.nraddr; \
4797 __asm__ volatile( \
4798 VALGRIND_CFI_PROLOGUE \
4799 "aghi 15,-160\n\t" \
4800 "lg 1, 0(1)\n\t" /* target->r1 */ \
4801 VALGRIND_CALL_NOREDIR_R1 \
4802 "aghi 15,160\n\t" \
4803 VALGRIND_CFI_EPILOGUE \
4804 "lgr %0, 2\n\t" \
4805 : /*out*/ "=d" (_res) \
4806 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
4807 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4808 ); \
4809 lval = (__typeof__(lval)) _res; \
4810 } while (0)
4812 /* The call abi has the arguments in r2-r6 and stack */
/* s390x: call with 1 argument.  arg1 is loaded into r2 from argvec[1];
   the target address is loaded into r1 last (it overwrites the argvec
   pointer, which also lives in r1). */
4813 #define CALL_FN_W_W(lval, orig, arg1) \
4814 do { \
4815 volatile OrigFn _orig = (orig); \
4816 volatile unsigned long _argvec[2]; \
4817 volatile unsigned long _res; \
4818 _argvec[0] = (unsigned long)_orig.nraddr; \
4819 _argvec[1] = (unsigned long)arg1; \
4820 __asm__ volatile( \
4821 VALGRIND_CFI_PROLOGUE \
4822 "aghi 15,-160\n\t" \
4823 "lg 2, 8(1)\n\t" \
4824 "lg 1, 0(1)\n\t" \
4825 VALGRIND_CALL_NOREDIR_R1 \
4826 "aghi 15,160\n\t" \
4827 VALGRIND_CFI_EPILOGUE \
4828 "lgr %0, 2\n\t" \
4829 : /*out*/ "=d" (_res) \
4830 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4831 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4832 ); \
4833 lval = (__typeof__(lval)) _res; \
4834 } while (0)
/* s390x: call with 2 arguments in r2-r3. */
4836 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
4837 do { \
4838 volatile OrigFn _orig = (orig); \
4839 volatile unsigned long _argvec[3]; \
4840 volatile unsigned long _res; \
4841 _argvec[0] = (unsigned long)_orig.nraddr; \
4842 _argvec[1] = (unsigned long)arg1; \
4843 _argvec[2] = (unsigned long)arg2; \
4844 __asm__ volatile( \
4845 VALGRIND_CFI_PROLOGUE \
4846 "aghi 15,-160\n\t" \
4847 "lg 2, 8(1)\n\t" \
4848 "lg 3,16(1)\n\t" \
4849 "lg 1, 0(1)\n\t" \
4850 VALGRIND_CALL_NOREDIR_R1 \
4851 "aghi 15,160\n\t" \
4852 VALGRIND_CFI_EPILOGUE \
4853 "lgr %0, 2\n\t" \
4854 : /*out*/ "=d" (_res) \
4855 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4856 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4857 ); \
4858 lval = (__typeof__(lval)) _res; \
4859 } while (0)
/* s390x: call with 3 arguments in r2-r4. */
4861 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
4862 do { \
4863 volatile OrigFn _orig = (orig); \
4864 volatile unsigned long _argvec[4]; \
4865 volatile unsigned long _res; \
4866 _argvec[0] = (unsigned long)_orig.nraddr; \
4867 _argvec[1] = (unsigned long)arg1; \
4868 _argvec[2] = (unsigned long)arg2; \
4869 _argvec[3] = (unsigned long)arg3; \
4870 __asm__ volatile( \
4871 VALGRIND_CFI_PROLOGUE \
4872 "aghi 15,-160\n\t" \
4873 "lg 2, 8(1)\n\t" \
4874 "lg 3,16(1)\n\t" \
4875 "lg 4,24(1)\n\t" \
4876 "lg 1, 0(1)\n\t" \
4877 VALGRIND_CALL_NOREDIR_R1 \
4878 "aghi 15,160\n\t" \
4879 VALGRIND_CFI_EPILOGUE \
4880 "lgr %0, 2\n\t" \
4881 : /*out*/ "=d" (_res) \
4882 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4883 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4884 ); \
4885 lval = (__typeof__(lval)) _res; \
4886 } while (0)
/* s390x: call with 4 arguments in r2-r5. */
4888 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
4889 do { \
4890 volatile OrigFn _orig = (orig); \
4891 volatile unsigned long _argvec[5]; \
4892 volatile unsigned long _res; \
4893 _argvec[0] = (unsigned long)_orig.nraddr; \
4894 _argvec[1] = (unsigned long)arg1; \
4895 _argvec[2] = (unsigned long)arg2; \
4896 _argvec[3] = (unsigned long)arg3; \
4897 _argvec[4] = (unsigned long)arg4; \
4898 __asm__ volatile( \
4899 VALGRIND_CFI_PROLOGUE \
4900 "aghi 15,-160\n\t" \
4901 "lg 2, 8(1)\n\t" \
4902 "lg 3,16(1)\n\t" \
4903 "lg 4,24(1)\n\t" \
4904 "lg 5,32(1)\n\t" \
4905 "lg 1, 0(1)\n\t" \
4906 VALGRIND_CALL_NOREDIR_R1 \
4907 "aghi 15,160\n\t" \
4908 VALGRIND_CFI_EPILOGUE \
4909 "lgr %0, 2\n\t" \
4910 : /*out*/ "=d" (_res) \
4911 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4912 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4913 ); \
4914 lval = (__typeof__(lval)) _res; \
4915 } while (0)
/* s390x: call with 5 arguments in r2-r6.  r6 is call-saved in the ABI
   but used here for arg5, so it is added to the clobber list. */
4917 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
4918 do { \
4919 volatile OrigFn _orig = (orig); \
4920 volatile unsigned long _argvec[6]; \
4921 volatile unsigned long _res; \
4922 _argvec[0] = (unsigned long)_orig.nraddr; \
4923 _argvec[1] = (unsigned long)arg1; \
4924 _argvec[2] = (unsigned long)arg2; \
4925 _argvec[3] = (unsigned long)arg3; \
4926 _argvec[4] = (unsigned long)arg4; \
4927 _argvec[5] = (unsigned long)arg5; \
4928 __asm__ volatile( \
4929 VALGRIND_CFI_PROLOGUE \
4930 "aghi 15,-160\n\t" \
4931 "lg 2, 8(1)\n\t" \
4932 "lg 3,16(1)\n\t" \
4933 "lg 4,24(1)\n\t" \
4934 "lg 5,32(1)\n\t" \
4935 "lg 6,40(1)\n\t" \
4936 "lg 1, 0(1)\n\t" \
4937 VALGRIND_CALL_NOREDIR_R1 \
4938 "aghi 15,160\n\t" \
4939 VALGRIND_CFI_EPILOGUE \
4940 "lgr %0, 2\n\t" \
4941 : /*out*/ "=d" (_res) \
4942 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4943 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4944 ); \
4945 lval = (__typeof__(lval)) _res; \
4946 } while (0)
/* s390x: call with 6 arguments: args 1-5 in r2-r6, arg6 copied with mvc
   into the stack parameter slot at offset 160 (save area is grown to 168
   bytes to make room for it). */
4948 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4949 arg6) \
4950 do { \
4951 volatile OrigFn _orig = (orig); \
4952 volatile unsigned long _argvec[7]; \
4953 volatile unsigned long _res; \
4954 _argvec[0] = (unsigned long)_orig.nraddr; \
4955 _argvec[1] = (unsigned long)arg1; \
4956 _argvec[2] = (unsigned long)arg2; \
4957 _argvec[3] = (unsigned long)arg3; \
4958 _argvec[4] = (unsigned long)arg4; \
4959 _argvec[5] = (unsigned long)arg5; \
4960 _argvec[6] = (unsigned long)arg6; \
4961 __asm__ volatile( \
4962 VALGRIND_CFI_PROLOGUE \
4963 "aghi 15,-168\n\t" \
4964 "lg 2, 8(1)\n\t" \
4965 "lg 3,16(1)\n\t" \
4966 "lg 4,24(1)\n\t" \
4967 "lg 5,32(1)\n\t" \
4968 "lg 6,40(1)\n\t" \
4969 "mvc 160(8,15), 48(1)\n\t" \
4970 "lg 1, 0(1)\n\t" \
4971 VALGRIND_CALL_NOREDIR_R1 \
4972 "aghi 15,168\n\t" \
4973 VALGRIND_CFI_EPILOGUE \
4974 "lgr %0, 2\n\t" \
4975 : /*out*/ "=d" (_res) \
4976 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4977 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4978 ); \
4979 lval = (__typeof__(lval)) _res; \
4980 } while (0)
/* s390x: call with 7 arguments: args 1-5 in r2-r6, args 6-7 in stack
   slots at offsets 160/168 (save area grown to 176 bytes). */
4982 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4983 arg6, arg7) \
4984 do { \
4985 volatile OrigFn _orig = (orig); \
4986 volatile unsigned long _argvec[8]; \
4987 volatile unsigned long _res; \
4988 _argvec[0] = (unsigned long)_orig.nraddr; \
4989 _argvec[1] = (unsigned long)arg1; \
4990 _argvec[2] = (unsigned long)arg2; \
4991 _argvec[3] = (unsigned long)arg3; \
4992 _argvec[4] = (unsigned long)arg4; \
4993 _argvec[5] = (unsigned long)arg5; \
4994 _argvec[6] = (unsigned long)arg6; \
4995 _argvec[7] = (unsigned long)arg7; \
4996 __asm__ volatile( \
4997 VALGRIND_CFI_PROLOGUE \
4998 "aghi 15,-176\n\t" \
4999 "lg 2, 8(1)\n\t" \
5000 "lg 3,16(1)\n\t" \
5001 "lg 4,24(1)\n\t" \
5002 "lg 5,32(1)\n\t" \
5003 "lg 6,40(1)\n\t" \
5004 "mvc 160(8,15), 48(1)\n\t" \
5005 "mvc 168(8,15), 56(1)\n\t" \
5006 "lg 1, 0(1)\n\t" \
5007 VALGRIND_CALL_NOREDIR_R1 \
5008 "aghi 15,176\n\t" \
5009 VALGRIND_CFI_EPILOGUE \
5010 "lgr %0, 2\n\t" \
5011 : /*out*/ "=d" (_res) \
5012 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5013 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5014 ); \
5015 lval = (__typeof__(lval)) _res; \
5016 } while (0)
/* s390x: call with 8 arguments: args 1-5 in r2-r6, args 6-8 in stack
   slots 160/168/176 (save area grown to 184 bytes). */
5018 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5019 arg6, arg7 ,arg8) \
5020 do { \
5021 volatile OrigFn _orig = (orig); \
5022 volatile unsigned long _argvec[9]; \
5023 volatile unsigned long _res; \
5024 _argvec[0] = (unsigned long)_orig.nraddr; \
5025 _argvec[1] = (unsigned long)arg1; \
5026 _argvec[2] = (unsigned long)arg2; \
5027 _argvec[3] = (unsigned long)arg3; \
5028 _argvec[4] = (unsigned long)arg4; \
5029 _argvec[5] = (unsigned long)arg5; \
5030 _argvec[6] = (unsigned long)arg6; \
5031 _argvec[7] = (unsigned long)arg7; \
5032 _argvec[8] = (unsigned long)arg8; \
5033 __asm__ volatile( \
5034 VALGRIND_CFI_PROLOGUE \
5035 "aghi 15,-184\n\t" \
5036 "lg 2, 8(1)\n\t" \
5037 "lg 3,16(1)\n\t" \
5038 "lg 4,24(1)\n\t" \
5039 "lg 5,32(1)\n\t" \
5040 "lg 6,40(1)\n\t" \
5041 "mvc 160(8,15), 48(1)\n\t" \
5042 "mvc 168(8,15), 56(1)\n\t" \
5043 "mvc 176(8,15), 64(1)\n\t" \
5044 "lg 1, 0(1)\n\t" \
5045 VALGRIND_CALL_NOREDIR_R1 \
5046 "aghi 15,184\n\t" \
5047 VALGRIND_CFI_EPILOGUE \
5048 "lgr %0, 2\n\t" \
5049 : /*out*/ "=d" (_res) \
5050 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5051 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5052 ); \
5053 lval = (__typeof__(lval)) _res; \
5054 } while (0)
/* s390x: call with 9 arguments: args 1-5 in r2-r6, args 6-9 in stack
   slots 160..184 (save area grown to 192 bytes). */
5056 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5057 arg6, arg7 ,arg8, arg9) \
5058 do { \
5059 volatile OrigFn _orig = (orig); \
5060 volatile unsigned long _argvec[10]; \
5061 volatile unsigned long _res; \
5062 _argvec[0] = (unsigned long)_orig.nraddr; \
5063 _argvec[1] = (unsigned long)arg1; \
5064 _argvec[2] = (unsigned long)arg2; \
5065 _argvec[3] = (unsigned long)arg3; \
5066 _argvec[4] = (unsigned long)arg4; \
5067 _argvec[5] = (unsigned long)arg5; \
5068 _argvec[6] = (unsigned long)arg6; \
5069 _argvec[7] = (unsigned long)arg7; \
5070 _argvec[8] = (unsigned long)arg8; \
5071 _argvec[9] = (unsigned long)arg9; \
5072 __asm__ volatile( \
5073 VALGRIND_CFI_PROLOGUE \
5074 "aghi 15,-192\n\t" \
5075 "lg 2, 8(1)\n\t" \
5076 "lg 3,16(1)\n\t" \
5077 "lg 4,24(1)\n\t" \
5078 "lg 5,32(1)\n\t" \
5079 "lg 6,40(1)\n\t" \
5080 "mvc 160(8,15), 48(1)\n\t" \
5081 "mvc 168(8,15), 56(1)\n\t" \
5082 "mvc 176(8,15), 64(1)\n\t" \
5083 "mvc 184(8,15), 72(1)\n\t" \
5084 "lg 1, 0(1)\n\t" \
5085 VALGRIND_CALL_NOREDIR_R1 \
5086 "aghi 15,192\n\t" \
5087 VALGRIND_CFI_EPILOGUE \
5088 "lgr %0, 2\n\t" \
5089 : /*out*/ "=d" (_res) \
5090 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5091 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5092 ); \
5093 lval = (__typeof__(lval)) _res; \
5094 } while (0)
/* s390x: call with 10 arguments: args 1-5 in r2-r6, args 6-10 in stack
   slots 160..192 (save area grown to 200 bytes). */
5096 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5097 arg6, arg7 ,arg8, arg9, arg10) \
5098 do { \
5099 volatile OrigFn _orig = (orig); \
5100 volatile unsigned long _argvec[11]; \
5101 volatile unsigned long _res; \
5102 _argvec[0] = (unsigned long)_orig.nraddr; \
5103 _argvec[1] = (unsigned long)arg1; \
5104 _argvec[2] = (unsigned long)arg2; \
5105 _argvec[3] = (unsigned long)arg3; \
5106 _argvec[4] = (unsigned long)arg4; \
5107 _argvec[5] = (unsigned long)arg5; \
5108 _argvec[6] = (unsigned long)arg6; \
5109 _argvec[7] = (unsigned long)arg7; \
5110 _argvec[8] = (unsigned long)arg8; \
5111 _argvec[9] = (unsigned long)arg9; \
5112 _argvec[10] = (unsigned long)arg10; \
5113 __asm__ volatile( \
5114 VALGRIND_CFI_PROLOGUE \
5115 "aghi 15,-200\n\t" \
5116 "lg 2, 8(1)\n\t" \
5117 "lg 3,16(1)\n\t" \
5118 "lg 4,24(1)\n\t" \
5119 "lg 5,32(1)\n\t" \
5120 "lg 6,40(1)\n\t" \
5121 "mvc 160(8,15), 48(1)\n\t" \
5122 "mvc 168(8,15), 56(1)\n\t" \
5123 "mvc 176(8,15), 64(1)\n\t" \
5124 "mvc 184(8,15), 72(1)\n\t" \
5125 "mvc 192(8,15), 80(1)\n\t" \
5126 "lg 1, 0(1)\n\t" \
5127 VALGRIND_CALL_NOREDIR_R1 \
5128 "aghi 15,200\n\t" \
5129 VALGRIND_CFI_EPILOGUE \
5130 "lgr %0, 2\n\t" \
5131 : /*out*/ "=d" (_res) \
5132 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5133 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5134 ); \
5135 lval = (__typeof__(lval)) _res; \
5136 } while (0)
/* s390x: call with 11 arguments: args 1-5 in r2-r6, args 6-11 in stack
   slots 160..200 (save area grown to 208 bytes). */
5138 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5139 arg6, arg7 ,arg8, arg9, arg10, arg11) \
5140 do { \
5141 volatile OrigFn _orig = (orig); \
5142 volatile unsigned long _argvec[12]; \
5143 volatile unsigned long _res; \
5144 _argvec[0] = (unsigned long)_orig.nraddr; \
5145 _argvec[1] = (unsigned long)arg1; \
5146 _argvec[2] = (unsigned long)arg2; \
5147 _argvec[3] = (unsigned long)arg3; \
5148 _argvec[4] = (unsigned long)arg4; \
5149 _argvec[5] = (unsigned long)arg5; \
5150 _argvec[6] = (unsigned long)arg6; \
5151 _argvec[7] = (unsigned long)arg7; \
5152 _argvec[8] = (unsigned long)arg8; \
5153 _argvec[9] = (unsigned long)arg9; \
5154 _argvec[10] = (unsigned long)arg10; \
5155 _argvec[11] = (unsigned long)arg11; \
5156 __asm__ volatile( \
5157 VALGRIND_CFI_PROLOGUE \
5158 "aghi 15,-208\n\t" \
5159 "lg 2, 8(1)\n\t" \
5160 "lg 3,16(1)\n\t" \
5161 "lg 4,24(1)\n\t" \
5162 "lg 5,32(1)\n\t" \
5163 "lg 6,40(1)\n\t" \
5164 "mvc 160(8,15), 48(1)\n\t" \
5165 "mvc 168(8,15), 56(1)\n\t" \
5166 "mvc 176(8,15), 64(1)\n\t" \
5167 "mvc 184(8,15), 72(1)\n\t" \
5168 "mvc 192(8,15), 80(1)\n\t" \
5169 "mvc 200(8,15), 88(1)\n\t" \
5170 "lg 1, 0(1)\n\t" \
5171 VALGRIND_CALL_NOREDIR_R1 \
5172 "aghi 15,208\n\t" \
5173 VALGRIND_CFI_EPILOGUE \
5174 "lgr %0, 2\n\t" \
5175 : /*out*/ "=d" (_res) \
5176 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5177 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5178 ); \
5179 lval = (__typeof__(lval)) _res; \
5180 } while (0)
/* s390x: call with 12 arguments: args 1-5 in r2-r6, args 6-12 in stack
   slots 160..208 (save area grown to 216 bytes). */
5182 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5183 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
5184 do { \
5185 volatile OrigFn _orig = (orig); \
5186 volatile unsigned long _argvec[13]; \
5187 volatile unsigned long _res; \
5188 _argvec[0] = (unsigned long)_orig.nraddr; \
5189 _argvec[1] = (unsigned long)arg1; \
5190 _argvec[2] = (unsigned long)arg2; \
5191 _argvec[3] = (unsigned long)arg3; \
5192 _argvec[4] = (unsigned long)arg4; \
5193 _argvec[5] = (unsigned long)arg5; \
5194 _argvec[6] = (unsigned long)arg6; \
5195 _argvec[7] = (unsigned long)arg7; \
5196 _argvec[8] = (unsigned long)arg8; \
5197 _argvec[9] = (unsigned long)arg9; \
5198 _argvec[10] = (unsigned long)arg10; \
5199 _argvec[11] = (unsigned long)arg11; \
5200 _argvec[12] = (unsigned long)arg12; \
5201 __asm__ volatile( \
5202 VALGRIND_CFI_PROLOGUE \
5203 "aghi 15,-216\n\t" \
5204 "lg 2, 8(1)\n\t" \
5205 "lg 3,16(1)\n\t" \
5206 "lg 4,24(1)\n\t" \
5207 "lg 5,32(1)\n\t" \
5208 "lg 6,40(1)\n\t" \
5209 "mvc 160(8,15), 48(1)\n\t" \
5210 "mvc 168(8,15), 56(1)\n\t" \
5211 "mvc 176(8,15), 64(1)\n\t" \
5212 "mvc 184(8,15), 72(1)\n\t" \
5213 "mvc 192(8,15), 80(1)\n\t" \
5214 "mvc 200(8,15), 88(1)\n\t" \
5215 "mvc 208(8,15), 96(1)\n\t" \
5216 "lg 1, 0(1)\n\t" \
5217 VALGRIND_CALL_NOREDIR_R1 \
5218 "aghi 15,216\n\t" \
5219 VALGRIND_CFI_EPILOGUE \
5220 "lgr %0, 2\n\t" \
5221 : /*out*/ "=d" (_res) \
5222 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5223 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5224 ); \
5225 lval = (__typeof__(lval)) _res; \
5226 } while (0)
5229 #endif /* PLAT_s390x_linux */
5231 /* ------------------------- mips32-linux ----------------------- */
5233 #if defined(PLAT_mips32_linux)
5235 /* These regs are trashed by the hidden call. */
/* mips32 (o32): the temporaries $2-$15, $24, $25 and the return address
   $31 are clobbered by the hidden call. */
5236 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5237 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5238 "$25", "$31"
5240 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5241 long) == 4. */
/* mips32: call the original function with no arguments.  $28 (gp) and
   $31 (ra) are spilled around the call, a 16-byte outgoing-argument
   area is reserved as o32 requires, the target goes in $25 (t9, which
   o32 PIC calling requires), and the result comes back in $2 (v0). */
5243 #define CALL_FN_W_v(lval, orig) \
5244 do { \
5245 volatile OrigFn _orig = (orig); \
5246 volatile unsigned long _argvec[1]; \
5247 volatile unsigned long _res; \
5248 _argvec[0] = (unsigned long)_orig.nraddr; \
5249 __asm__ volatile( \
5250 "subu $29, $29, 8 \n\t" \
5251 "sw $28, 0($29) \n\t" \
5252 "sw $31, 4($29) \n\t" \
5253 "subu $29, $29, 16 \n\t" \
5254 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5255 VALGRIND_CALL_NOREDIR_T9 \
5256 "addu $29, $29, 16\n\t" \
5257 "lw $28, 0($29) \n\t" \
5258 "lw $31, 4($29) \n\t" \
5259 "addu $29, $29, 8 \n\t" \
5260 "move %0, $2\n" \
5261 : /*out*/ "=r" (_res) \
5262 : /*in*/ "0" (&_argvec[0]) \
5263 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5264 ); \
5265 lval = (__typeof__(lval)) _res; \
5266 } while (0)
/* mips32: call with 1 argument in $4 (a0). */
5268 #define CALL_FN_W_W(lval, orig, arg1) \
5269 do { \
5270 volatile OrigFn _orig = (orig); \
5271 volatile unsigned long _argvec[2]; \
5272 volatile unsigned long _res; \
5273 _argvec[0] = (unsigned long)_orig.nraddr; \
5274 _argvec[1] = (unsigned long)(arg1); \
5275 __asm__ volatile( \
5276 "subu $29, $29, 8 \n\t" \
5277 "sw $28, 0($29) \n\t" \
5278 "sw $31, 4($29) \n\t" \
5279 "subu $29, $29, 16 \n\t" \
5280 "lw $4, 4(%1) \n\t" /* arg1*/ \
5281 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5282 VALGRIND_CALL_NOREDIR_T9 \
5283 "addu $29, $29, 16 \n\t" \
5284 "lw $28, 0($29) \n\t" \
5285 "lw $31, 4($29) \n\t" \
5286 "addu $29, $29, 8 \n\t" \
5287 "move %0, $2\n" \
5288 : /*out*/ "=r" (_res) \
5289 : /*in*/ "0" (&_argvec[0]) \
5290 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5291 ); \
5292 lval = (__typeof__(lval)) _res; \
5293 } while (0)
/* mips32: call with 2 arguments in $4-$5 (a0-a1). */
5295 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5296 do { \
5297 volatile OrigFn _orig = (orig); \
5298 volatile unsigned long _argvec[3]; \
5299 volatile unsigned long _res; \
5300 _argvec[0] = (unsigned long)_orig.nraddr; \
5301 _argvec[1] = (unsigned long)(arg1); \
5302 _argvec[2] = (unsigned long)(arg2); \
5303 __asm__ volatile( \
5304 "subu $29, $29, 8 \n\t" \
5305 "sw $28, 0($29) \n\t" \
5306 "sw $31, 4($29) \n\t" \
5307 "subu $29, $29, 16 \n\t" \
5308 "lw $4, 4(%1) \n\t" \
5309 "lw $5, 8(%1) \n\t" \
5310 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5311 VALGRIND_CALL_NOREDIR_T9 \
5312 "addu $29, $29, 16 \n\t" \
5313 "lw $28, 0($29) \n\t" \
5314 "lw $31, 4($29) \n\t" \
5315 "addu $29, $29, 8 \n\t" \
5316 "move %0, $2\n" \
5317 : /*out*/ "=r" (_res) \
5318 : /*in*/ "0" (&_argvec[0]) \
5319 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5320 ); \
5321 lval = (__typeof__(lval)) _res; \
5322 } while (0)
/* mips32: call with 3 arguments in $4-$6 (a0-a2). */
5324 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5325 do { \
5326 volatile OrigFn _orig = (orig); \
5327 volatile unsigned long _argvec[4]; \
5328 volatile unsigned long _res; \
5329 _argvec[0] = (unsigned long)_orig.nraddr; \
5330 _argvec[1] = (unsigned long)(arg1); \
5331 _argvec[2] = (unsigned long)(arg2); \
5332 _argvec[3] = (unsigned long)(arg3); \
5333 __asm__ volatile( \
5334 "subu $29, $29, 8 \n\t" \
5335 "sw $28, 0($29) \n\t" \
5336 "sw $31, 4($29) \n\t" \
5337 "subu $29, $29, 16 \n\t" \
5338 "lw $4, 4(%1) \n\t" \
5339 "lw $5, 8(%1) \n\t" \
5340 "lw $6, 12(%1) \n\t" \
5341 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5342 VALGRIND_CALL_NOREDIR_T9 \
5343 "addu $29, $29, 16 \n\t" \
5344 "lw $28, 0($29) \n\t" \
5345 "lw $31, 4($29) \n\t" \
5346 "addu $29, $29, 8 \n\t" \
5347 "move %0, $2\n" \
5348 : /*out*/ "=r" (_res) \
5349 : /*in*/ "0" (&_argvec[0]) \
5350 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5351 ); \
5352 lval = (__typeof__(lval)) _res; \
5353 } while (0)
/* mips32: call with 4 arguments in $4-$7 (a0-a3). */
5355 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5356 do { \
5357 volatile OrigFn _orig = (orig); \
5358 volatile unsigned long _argvec[5]; \
5359 volatile unsigned long _res; \
5360 _argvec[0] = (unsigned long)_orig.nraddr; \
5361 _argvec[1] = (unsigned long)(arg1); \
5362 _argvec[2] = (unsigned long)(arg2); \
5363 _argvec[3] = (unsigned long)(arg3); \
5364 _argvec[4] = (unsigned long)(arg4); \
5365 __asm__ volatile( \
5366 "subu $29, $29, 8 \n\t" \
5367 "sw $28, 0($29) \n\t" \
5368 "sw $31, 4($29) \n\t" \
5369 "subu $29, $29, 16 \n\t" \
5370 "lw $4, 4(%1) \n\t" \
5371 "lw $5, 8(%1) \n\t" \
5372 "lw $6, 12(%1) \n\t" \
5373 "lw $7, 16(%1) \n\t" \
5374 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5375 VALGRIND_CALL_NOREDIR_T9 \
5376 "addu $29, $29, 16 \n\t" \
5377 "lw $28, 0($29) \n\t" \
5378 "lw $31, 4($29) \n\t" \
5379 "addu $29, $29, 8 \n\t" \
5380 "move %0, $2\n" \
5381 : /*out*/ "=r" (_res) \
5382 : /*in*/ "0" (&_argvec[0]) \
5383 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5384 ); \
5385 lval = (__typeof__(lval)) _res; \
5386 } while (0)
/* mips32: call with 5 arguments: args 1-4 in $4-$7, arg5 placed in the
   stack slot at 16($29) above the 16-byte reserved area ($4 is used as a
   scratch register to stage the stack copy before being reloaded). */
5388 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5389 do { \
5390 volatile OrigFn _orig = (orig); \
5391 volatile unsigned long _argvec[6]; \
5392 volatile unsigned long _res; \
5393 _argvec[0] = (unsigned long)_orig.nraddr; \
5394 _argvec[1] = (unsigned long)(arg1); \
5395 _argvec[2] = (unsigned long)(arg2); \
5396 _argvec[3] = (unsigned long)(arg3); \
5397 _argvec[4] = (unsigned long)(arg4); \
5398 _argvec[5] = (unsigned long)(arg5); \
5399 __asm__ volatile( \
5400 "subu $29, $29, 8 \n\t" \
5401 "sw $28, 0($29) \n\t" \
5402 "sw $31, 4($29) \n\t" \
5403 "lw $4, 20(%1) \n\t" \
5404 "subu $29, $29, 24\n\t" \
5405 "sw $4, 16($29) \n\t" \
5406 "lw $4, 4(%1) \n\t" \
5407 "lw $5, 8(%1) \n\t" \
5408 "lw $6, 12(%1) \n\t" \
5409 "lw $7, 16(%1) \n\t" \
5410 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5411 VALGRIND_CALL_NOREDIR_T9 \
5412 "addu $29, $29, 24 \n\t" \
5413 "lw $28, 0($29) \n\t" \
5414 "lw $31, 4($29) \n\t" \
5415 "addu $29, $29, 8 \n\t" \
5416 "move %0, $2\n" \
5417 : /*out*/ "=r" (_res) \
5418 : /*in*/ "0" (&_argvec[0]) \
5419 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5420 ); \
5421 lval = (__typeof__(lval)) _res; \
5422 } while (0)
/* mips32: call with 6 arguments: args 1-4 in $4-$7, args 5-6 in stack
   slots 16/20($29); $4 doubles as the scratch for the stack copies. */
5423 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5424 do { \
5425 volatile OrigFn _orig = (orig); \
5426 volatile unsigned long _argvec[7]; \
5427 volatile unsigned long _res; \
5428 _argvec[0] = (unsigned long)_orig.nraddr; \
5429 _argvec[1] = (unsigned long)(arg1); \
5430 _argvec[2] = (unsigned long)(arg2); \
5431 _argvec[3] = (unsigned long)(arg3); \
5432 _argvec[4] = (unsigned long)(arg4); \
5433 _argvec[5] = (unsigned long)(arg5); \
5434 _argvec[6] = (unsigned long)(arg6); \
5435 __asm__ volatile( \
5436 "subu $29, $29, 8 \n\t" \
5437 "sw $28, 0($29) \n\t" \
5438 "sw $31, 4($29) \n\t" \
5439 "lw $4, 20(%1) \n\t" \
5440 "subu $29, $29, 32\n\t" \
5441 "sw $4, 16($29) \n\t" \
5442 "lw $4, 24(%1) \n\t" \
5443 "nop\n\t" \
5444 "sw $4, 20($29) \n\t" \
5445 "lw $4, 4(%1) \n\t" \
5446 "lw $5, 8(%1) \n\t" \
5447 "lw $6, 12(%1) \n\t" \
5448 "lw $7, 16(%1) \n\t" \
5449 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5450 VALGRIND_CALL_NOREDIR_T9 \
5451 "addu $29, $29, 32 \n\t" \
5452 "lw $28, 0($29) \n\t" \
5453 "lw $31, 4($29) \n\t" \
5454 "addu $29, $29, 8 \n\t" \
5455 "move %0, $2\n" \
5456 : /*out*/ "=r" (_res) \
5457 : /*in*/ "0" (&_argvec[0]) \
5458 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5459 ); \
5460 lval = (__typeof__(lval)) _res; \
5461 } while (0)
/* mips32: call with 7 arguments: args 1-4 in $4-$7, args 5-7 in stack
   slots 16/20/24($29). */
5463 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5464 arg7) \
5465 do { \
5466 volatile OrigFn _orig = (orig); \
5467 volatile unsigned long _argvec[8]; \
5468 volatile unsigned long _res; \
5469 _argvec[0] = (unsigned long)_orig.nraddr; \
5470 _argvec[1] = (unsigned long)(arg1); \
5471 _argvec[2] = (unsigned long)(arg2); \
5472 _argvec[3] = (unsigned long)(arg3); \
5473 _argvec[4] = (unsigned long)(arg4); \
5474 _argvec[5] = (unsigned long)(arg5); \
5475 _argvec[6] = (unsigned long)(arg6); \
5476 _argvec[7] = (unsigned long)(arg7); \
5477 __asm__ volatile( \
5478 "subu $29, $29, 8 \n\t" \
5479 "sw $28, 0($29) \n\t" \
5480 "sw $31, 4($29) \n\t" \
5481 "lw $4, 20(%1) \n\t" \
5482 "subu $29, $29, 32\n\t" \
5483 "sw $4, 16($29) \n\t" \
5484 "lw $4, 24(%1) \n\t" \
5485 "sw $4, 20($29) \n\t" \
5486 "lw $4, 28(%1) \n\t" \
5487 "sw $4, 24($29) \n\t" \
5488 "lw $4, 4(%1) \n\t" \
5489 "lw $5, 8(%1) \n\t" \
5490 "lw $6, 12(%1) \n\t" \
5491 "lw $7, 16(%1) \n\t" \
5492 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5493 VALGRIND_CALL_NOREDIR_T9 \
5494 "addu $29, $29, 32 \n\t" \
5495 "lw $28, 0($29) \n\t" \
5496 "lw $31, 4($29) \n\t" \
5497 "addu $29, $29, 8 \n\t" \
5498 "move %0, $2\n" \
5499 : /*out*/ "=r" (_res) \
5500 : /*in*/ "0" (&_argvec[0]) \
5501 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5502 ); \
5503 lval = (__typeof__(lval)) _res; \
5504 } while (0)
/* mips32: call with 8 arguments: args 1-4 in $4-$7, args 5-8 in stack
   slots 16..28($29) (frame grown to 40 bytes). */
5506 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5507 arg7,arg8) \
5508 do { \
5509 volatile OrigFn _orig = (orig); \
5510 volatile unsigned long _argvec[9]; \
5511 volatile unsigned long _res; \
5512 _argvec[0] = (unsigned long)_orig.nraddr; \
5513 _argvec[1] = (unsigned long)(arg1); \
5514 _argvec[2] = (unsigned long)(arg2); \
5515 _argvec[3] = (unsigned long)(arg3); \
5516 _argvec[4] = (unsigned long)(arg4); \
5517 _argvec[5] = (unsigned long)(arg5); \
5518 _argvec[6] = (unsigned long)(arg6); \
5519 _argvec[7] = (unsigned long)(arg7); \
5520 _argvec[8] = (unsigned long)(arg8); \
5521 __asm__ volatile( \
5522 "subu $29, $29, 8 \n\t" \
5523 "sw $28, 0($29) \n\t" \
5524 "sw $31, 4($29) \n\t" \
5525 "lw $4, 20(%1) \n\t" \
5526 "subu $29, $29, 40\n\t" \
5527 "sw $4, 16($29) \n\t" \
5528 "lw $4, 24(%1) \n\t" \
5529 "sw $4, 20($29) \n\t" \
5530 "lw $4, 28(%1) \n\t" \
5531 "sw $4, 24($29) \n\t" \
5532 "lw $4, 32(%1) \n\t" \
5533 "sw $4, 28($29) \n\t" \
5534 "lw $4, 4(%1) \n\t" \
5535 "lw $5, 8(%1) \n\t" \
5536 "lw $6, 12(%1) \n\t" \
5537 "lw $7, 16(%1) \n\t" \
5538 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5539 VALGRIND_CALL_NOREDIR_T9 \
5540 "addu $29, $29, 40 \n\t" \
5541 "lw $28, 0($29) \n\t" \
5542 "lw $31, 4($29) \n\t" \
5543 "addu $29, $29, 8 \n\t" \
5544 "move %0, $2\n" \
5545 : /*out*/ "=r" (_res) \
5546 : /*in*/ "0" (&_argvec[0]) \
5547 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5548 ); \
5549 lval = (__typeof__(lval)) _res; \
5550 } while (0)
/* mips32: call with 9 arguments: args 1-4 in $4-$7, args 5-9 in stack
   slots 16..32($29) (frame grown to 40 bytes). */
5552 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5553 arg7,arg8,arg9) \
5554 do { \
5555 volatile OrigFn _orig = (orig); \
5556 volatile unsigned long _argvec[10]; \
5557 volatile unsigned long _res; \
5558 _argvec[0] = (unsigned long)_orig.nraddr; \
5559 _argvec[1] = (unsigned long)(arg1); \
5560 _argvec[2] = (unsigned long)(arg2); \
5561 _argvec[3] = (unsigned long)(arg3); \
5562 _argvec[4] = (unsigned long)(arg4); \
5563 _argvec[5] = (unsigned long)(arg5); \
5564 _argvec[6] = (unsigned long)(arg6); \
5565 _argvec[7] = (unsigned long)(arg7); \
5566 _argvec[8] = (unsigned long)(arg8); \
5567 _argvec[9] = (unsigned long)(arg9); \
5568 __asm__ volatile( \
5569 "subu $29, $29, 8 \n\t" \
5570 "sw $28, 0($29) \n\t" \
5571 "sw $31, 4($29) \n\t" \
5572 "lw $4, 20(%1) \n\t" \
5573 "subu $29, $29, 40\n\t" \
5574 "sw $4, 16($29) \n\t" \
5575 "lw $4, 24(%1) \n\t" \
5576 "sw $4, 20($29) \n\t" \
5577 "lw $4, 28(%1) \n\t" \
5578 "sw $4, 24($29) \n\t" \
5579 "lw $4, 32(%1) \n\t" \
5580 "sw $4, 28($29) \n\t" \
5581 "lw $4, 36(%1) \n\t" \
5582 "sw $4, 32($29) \n\t" \
5583 "lw $4, 4(%1) \n\t" \
5584 "lw $5, 8(%1) \n\t" \
5585 "lw $6, 12(%1) \n\t" \
5586 "lw $7, 16(%1) \n\t" \
5587 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5588 VALGRIND_CALL_NOREDIR_T9 \
5589 "addu $29, $29, 40 \n\t" \
5590 "lw $28, 0($29) \n\t" \
5591 "lw $31, 4($29) \n\t" \
5592 "addu $29, $29, 8 \n\t" \
5593 "move %0, $2\n" \
5594 : /*out*/ "=r" (_res) \
5595 : /*in*/ "0" (&_argvec[0]) \
5596 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5597 ); \
5598 lval = (__typeof__(lval)) _res; \
5599 } while (0)
/* 10-argument variant: args 1-4 in $4-$7, args 5-10 stored at
   16..36($29) in a 48-byte outgoing area; $28/$31 saved and restored
   around the call; result taken from $2. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" /* save gp/ra */ \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 48\n\t" /* outgoing arg area */ \
         "sw $4, 16($29) \n\t" /* arg5 */ \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" /* arg6 */ \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" /* arg7 */ \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" /* arg8 */ \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" /* arg9 */ \
         "lw $4, 40(%1) \n\t" \
         "sw $4, 36($29) \n\t" /* arg10 */ \
         "lw $4, 4(%1) \n\t" /* args 1-4 -> $4-$7 */ \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 48 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 11-argument variant: args 1-4 in $4-$7, args 5-11 stored at
   16..40($29) in a 48-byte outgoing area; $28/$31 saved and restored
   around the call; result taken from $2. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" /* save gp/ra */ \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 48\n\t" /* outgoing arg area */ \
         "sw $4, 16($29) \n\t" /* arg5 */ \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" /* arg6 */ \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" /* arg7 */ \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" /* arg8 */ \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" /* arg9 */ \
         "lw $4, 40(%1) \n\t" \
         "sw $4, 36($29) \n\t" /* arg10 */ \
         "lw $4, 44(%1) \n\t" \
         "sw $4, 40($29) \n\t" /* arg11 */ \
         "lw $4, 4(%1) \n\t" /* args 1-4 -> $4-$7 */ \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 48 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 12-argument variant: args 1-4 in $4-$7, args 5-12 stored at
   16..44($29) in a 56-byte outgoing area; $28/$31 saved and restored
   around the call; result taken from $2.
   NOTE(review): this variant uses the "r" input constraint whereas the
   other mips32 variants use "0" (tied to the output operand); both
   appear workable, but the inconsistency is worth confirming. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11,arg12) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" /* save gp/ra */ \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 56\n\t" /* outgoing arg area */ \
         "sw $4, 16($29) \n\t" /* arg5 */ \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" /* arg6 */ \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" /* arg7 */ \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" /* arg8 */ \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" /* arg9 */ \
         "lw $4, 40(%1) \n\t" \
         "sw $4, 36($29) \n\t" /* arg10 */ \
         "lw $4, 44(%1) \n\t" \
         "sw $4, 40($29) \n\t" /* arg11 */ \
         "lw $4, 48(%1) \n\t" \
         "sw $4, 44($29) \n\t" /* arg12 */ \
         "lw $4, 4(%1) \n\t" /* args 1-4 -> $4-$7 */ \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 56 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5768 #endif /* PLAT_mips32_linux */
5770 /* ------------------------- nanomips-linux -------------------- */
5772 #if defined(PLAT_nanomips_linux)
5774 /* These regs are trashed by the hidden call. */
/* Registers clobbered by the hidden call on nanomips: the argument
   registers $a0-$a7, the temporaries $t0-$t5/$t8/$t9, and $at.
   Listed in the asm clobber lists of the CALL_FN_ macros below. */
#define __CALLER_SAVED_REGS "$t4", "$t5", "$a0", "$a1", "$a2", \
"$a3", "$a4", "$a5", "$a6", "$a7", "$t0", "$t1", "$t2", "$t3", \
"$t8","$t9", "$at"
/* These CALL_FN_ macros assume that on nanomips-linux,
   sizeof(unsigned long) == 4. */
/* Call a 0-argument function with redirection suppressed: the target
   address is loaded into $t9 and the result is read back from $a0
   (nanomips returns values in $a0, as the "move %0, $a0" shows). */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         "lw $t9, 0(%1)\n\t" /* target */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $a0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 1-argument call: arg1 in $a0, target in $t9, result from $a0. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         "lw $t9, 0(%1)\n\t" /* target */ \
         "lw $a0, 4(%1)\n\t" \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $a0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 2-argument call: args in $a0/$a1, target in $t9, result from $a0. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         "lw $t9, 0(%1)\n\t" /* target */ \
         "lw $a0, 4(%1)\n\t" \
         "lw $a1, 8(%1)\n\t" \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $a0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 3-argument call: args in $a0-$a2, target in $t9, result from $a0. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         "lw $t9, 0(%1)\n\t" /* target */ \
         "lw $a0, 4(%1)\n\t" \
         "lw $a1, 8(%1)\n\t" \
         "lw $a2,12(%1)\n\t" \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $a0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 4-argument call: args in $a0-$a3, target in $t9, result from $a0. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         "lw $t9, 0(%1)\n\t" /* target */ \
         "lw $a0, 4(%1)\n\t" \
         "lw $a1, 8(%1)\n\t" \
         "lw $a2,12(%1)\n\t" \
         "lw $a3,16(%1)\n\t" \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $a0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 5-argument call: args in $a0-$a4, target in $t9, result from $a0. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         "lw $t9, 0(%1)\n\t" /* target */ \
         "lw $a0, 4(%1)\n\t" \
         "lw $a1, 8(%1)\n\t" \
         "lw $a2,12(%1)\n\t" \
         "lw $a3,16(%1)\n\t" \
         "lw $a4,20(%1)\n\t" \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $a0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 6-argument call: args in $a0-$a5, target in $t9, result from $a0. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         "lw $t9, 0(%1)\n\t" /* target */ \
         "lw $a0, 4(%1)\n\t" \
         "lw $a1, 8(%1)\n\t" \
         "lw $a2,12(%1)\n\t" \
         "lw $a3,16(%1)\n\t" \
         "lw $a4,20(%1)\n\t" \
         "lw $a5,24(%1)\n\t" \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $a0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 7-argument call: args in $a0-$a6, target in $t9, result from $a0. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         "lw $t9, 0(%1)\n\t" /* target */ \
         "lw $a0, 4(%1)\n\t" \
         "lw $a1, 8(%1)\n\t" \
         "lw $a2,12(%1)\n\t" \
         "lw $a3,16(%1)\n\t" \
         "lw $a4,20(%1)\n\t" \
         "lw $a5,24(%1)\n\t" \
         "lw $a6,28(%1)\n\t" \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $a0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 8-argument call: args in $a0-$a7, target in $t9, result from $a0. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         "lw $t9, 0(%1)\n\t" /* target */ \
         "lw $a0, 4(%1)\n\t" \
         "lw $a1, 8(%1)\n\t" \
         "lw $a2,12(%1)\n\t" \
         "lw $a3,16(%1)\n\t" \
         "lw $a4,20(%1)\n\t" \
         "lw $a5,24(%1)\n\t" \
         "lw $a6,28(%1)\n\t" \
         "lw $a7,32(%1)\n\t" \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $a0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 9-argument call: args 1-8 in $a0-$a7; arg9 is stored at 0($sp)
   inside a 16-byte stack frame; target in $t9, result from $a0.
   $t9 is reused as a scratch register for the stack store before the
   target address is loaded into it. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         "addiu $sp, $sp, -16 \n\t" \
         "lw $t9,36(%1) \n\t" /* arg9 -> stack */ \
         "sw $t9, 0($sp) \n\t" \
         "lw $t9, 0(%1) \n\t" /* target */ \
         "lw $a0, 4(%1) \n\t" \
         "lw $a1, 8(%1) \n\t" \
         "lw $a2,12(%1) \n\t" \
         "lw $a3,16(%1) \n\t" \
         "lw $a4,20(%1) \n\t" \
         "lw $a5,24(%1) \n\t" \
         "lw $a6,28(%1) \n\t" \
         "lw $a7,32(%1) \n\t" \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $a0 \n\t" \
         "addiu $sp, $sp, 16 \n\t" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 10-argument call: args 1-8 in $a0-$a7; args 9-10 at 0/4($sp) inside
   a 16-byte stack frame; target in $t9, result from $a0. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         "addiu $sp, $sp, -16 \n\t" \
         "lw $t9,36(%1) \n\t" /* arg9 -> stack */ \
         "sw $t9, 0($sp) \n\t" \
         "lw $t9,40(%1) \n\t" /* arg10 -> stack */ \
         "sw $t9, 4($sp) \n\t" \
         "lw $t9, 0(%1) \n\t" /* target */ \
         "lw $a0, 4(%1) \n\t" \
         "lw $a1, 8(%1) \n\t" \
         "lw $a2,12(%1) \n\t" \
         "lw $a3,16(%1) \n\t" \
         "lw $a4,20(%1) \n\t" \
         "lw $a5,24(%1) \n\t" \
         "lw $a6,28(%1) \n\t" \
         "lw $a7,32(%1) \n\t" \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $a0 \n\t" \
         "addiu $sp, $sp, 16 \n\t" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 11-argument call: args 1-8 in $a0-$a7; args 9-11 at 0/4/8($sp)
   inside a 16-byte stack frame; target in $t9, result from $a0. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         "addiu $sp, $sp, -16 \n\t" \
         "lw $t9,36(%1) \n\t" /* arg9 -> stack */ \
         "sw $t9, 0($sp) \n\t" \
         "lw $t9,40(%1) \n\t" /* arg10 -> stack */ \
         "sw $t9, 4($sp) \n\t" \
         "lw $t9,44(%1) \n\t" /* arg11 -> stack */ \
         "sw $t9, 8($sp) \n\t" \
         "lw $t9, 0(%1) \n\t" /* target */ \
         "lw $a0, 4(%1) \n\t" \
         "lw $a1, 8(%1) \n\t" \
         "lw $a2,12(%1) \n\t" \
         "lw $a3,16(%1) \n\t" \
         "lw $a4,20(%1) \n\t" \
         "lw $a5,24(%1) \n\t" \
         "lw $a6,28(%1) \n\t" \
         "lw $a7,32(%1) \n\t" \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $a0 \n\t" \
         "addiu $sp, $sp, 16 \n\t" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 12-argument call: args 1-8 in $a0-$a7; args 9-12 at 0..12($sp)
   filling the whole 16-byte stack frame; target in $t9, result from
   $a0. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11,arg12) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         "addiu $sp, $sp, -16 \n\t" \
         "lw $t9,36(%1) \n\t" /* arg9 -> stack */ \
         "sw $t9, 0($sp) \n\t" \
         "lw $t9,40(%1) \n\t" /* arg10 -> stack */ \
         "sw $t9, 4($sp) \n\t" \
         "lw $t9,44(%1) \n\t" /* arg11 -> stack */ \
         "sw $t9, 8($sp) \n\t" \
         "lw $t9,48(%1) \n\t" /* arg12 -> stack */ \
         "sw $t9,12($sp) \n\t" \
         "lw $t9, 0(%1) \n\t" /* target */ \
         "lw $a0, 4(%1) \n\t" \
         "lw $a1, 8(%1) \n\t" \
         "lw $a2,12(%1) \n\t" \
         "lw $a3,16(%1) \n\t" \
         "lw $a4,20(%1) \n\t" \
         "lw $a5,24(%1) \n\t" \
         "lw $a6,28(%1) \n\t" \
         "lw $a7,32(%1) \n\t" \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $a0 \n\t" \
         "addiu $sp, $sp, 16 \n\t" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
6184 #endif /* PLAT_nanomips_linux */
6186 /* ------------------------- mips64-linux ------------------------- */
6188 #if defined(PLAT_mips64_linux)
6190 /* These regs are trashed by the hidden call. */
/* Registers clobbered by the hidden call on mips64: $2-$15 (v0-v1,
   a0-a7, t0-t3), $24 (t8), $25 (t9) and $31 (ra).  Listed in the asm
   clobber lists of the CALL_FN_ macros below. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
6195 /* These CALL_FN_ macros assume that on mips64-linux,
6196 sizeof(long long) == 8. */
/* Convert a value to the 64-bit register image used by the mips64
   CALL_FN_ macros: cast first to 'long' (64-bit on mips64-linux, per
   the comment above) and then to 'long long'.  The argument is
   parenthesised so that an expression argument such as 'a + b' is
   cast as a whole; the previous form '((long long)(long)x)' bound the
   cast only to the first operand of such an expression. */
#define MIPS64_LONG2REG_CAST(x) ((long long)(long)(x))
/* Call a 0-argument function with redirection suppressed: the target
   address is loaded into $25 (t9) and the result read from $2 (v0).
   NOTE(review): this variant ties the input to the output operand via
   the "0" constraint while the other mips64 variants use "r"; both
   appear workable, but the inconsistency is worth confirming. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[1]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      __asm__ volatile( \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
/* 1-argument call: arg1 in $4 (a0), target in $25 (t9), result from
   $2 (v0). */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[2]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t"  /* arg1*/ \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
/* 2-argument call: args in $4/$5, target in $25 (t9), result from $2.
   Fix: _argvec[0] now goes through MIPS64_LONG2REG_CAST like in every
   other mips64 CALL_FN_ macro (it previously relied on the implicit
   conversion of _orig.nraddr to unsigned long long). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[3]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
/* 3-argument call: args in $4-$6, target in $25 (t9), result from $2.
   Fix: _argvec[0] now goes through MIPS64_LONG2REG_CAST like in every
   other mips64 CALL_FN_ macro (it previously relied on the implicit
   conversion of _orig.nraddr to unsigned long long). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[4]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
/* 4-argument call: args in $4-$7, target in $25 (t9), result from $2. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[5]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
/* 5-argument call: args in $4-$8, target in $25 (t9), result from $2. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[6]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
/* 6-argument call: args in $4-$9, target in $25 (t9), result from $2. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[7]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
/* 7-argument call: args in $4-$10, target in $25 (t9), result from $2. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[8]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
/* 8-argument call: args in $4-$11, target in $25 (t9), result from $2. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[9]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
/* 9-argument call: args 1-8 in $4-$11; arg9 is stored at 0($29) in an
   8-byte stack frame (register $4 is reused as scratch for the store
   before the register args are loaded); target in $25, result from $2. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[10]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
      __asm__ volatile( \
         "dsubu $29, $29, 8\n\t" \
         "ld $4, 72(%1)\n\t" /* arg9 -> stack */ \
         "sd $4, 0($29)\n\t" \
         "ld $4, 8(%1)\n\t" /* args 1-8 -> $4-$11 */ \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "daddu $29, $29, 8\n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
/* 10-argument call: args 1-8 in $4-$11; args 9-10 at 0/8($29) in a
   16-byte stack frame; target in $25, result from $2. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[11]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
      __asm__ volatile( \
         "dsubu $29, $29, 16\n\t" \
         "ld $4, 72(%1)\n\t" /* arg9 -> stack */ \
         "sd $4, 0($29)\n\t" \
         "ld $4, 80(%1)\n\t" /* arg10 -> stack */ \
         "sd $4, 8($29)\n\t" \
         "ld $4, 8(%1)\n\t" /* args 1-8 -> $4-$11 */ \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "daddu $29, $29, 16\n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
/* 11-argument call: args 1-8 in $4-$11; args 9-11 at 0/8/16($29) in a
   24-byte stack frame; target in $25, result from $2. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[12]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
      __asm__ volatile( \
         "dsubu $29, $29, 24\n\t" \
         "ld $4, 72(%1)\n\t" /* arg9 -> stack */ \
         "sd $4, 0($29)\n\t" \
         "ld $4, 80(%1)\n\t" /* arg10 -> stack */ \
         "sd $4, 8($29)\n\t" \
         "ld $4, 88(%1)\n\t" /* arg11 -> stack */ \
         "sd $4, 16($29)\n\t" \
         "ld $4, 8(%1)\n\t" /* args 1-8 -> $4-$11 */ \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "daddu $29, $29, 24\n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
/* Call a 12-argument word function through a non-redirected pointer.
   Same scheme as CALL_FN_W_11W, with args 9..12 passed on the stack
   (hence 32 bytes reserved below) and args 1..8 in $4..$11. */
6555 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6556 arg6,arg7,arg8,arg9,arg10, \
6557 arg11,arg12) \
6558 do { \
6559 volatile OrigFn _orig = (orig); \
6560 volatile unsigned long long _argvec[13]; \
6561 volatile unsigned long long _res; \
6562 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6563 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6564 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6565 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6566 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6567 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6568 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6569 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6570 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6571 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6572 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6573 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
6574 _argvec[12] = MIPS64_LONG2REG_CAST(arg12); \
6575 __asm__ volatile( \
6576 "dsubu $29, $29, 32\n\t" /* room for 4 stack args (4*8) */ \
6577 "ld $4, 72(%1)\n\t" /* args 9..12 -> stack */ \
6578 "sd $4, 0($29)\n\t" \
6579 "ld $4, 80(%1)\n\t" \
6580 "sd $4, 8($29)\n\t" \
6581 "ld $4, 88(%1)\n\t" \
6582 "sd $4, 16($29)\n\t" \
6583 "ld $4, 96(%1)\n\t" \
6584 "sd $4, 24($29)\n\t" \
6585 "ld $4, 8(%1)\n\t" /* args 1..8 -> $4..$11 */ \
6586 "ld $5, 16(%1)\n\t" \
6587 "ld $6, 24(%1)\n\t" \
6588 "ld $7, 32(%1)\n\t" \
6589 "ld $8, 40(%1)\n\t" \
6590 "ld $9, 48(%1)\n\t" \
6591 "ld $10, 56(%1)\n\t" \
6592 "ld $11, 64(%1)\n\t" \
6593 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6594 VALGRIND_CALL_NOREDIR_T9 \
6595 "daddu $29, $29, 32\n\t" /* pop the 4 stack args */ \
6596 "move %0, $2\n" /* result is in $2 */ \
6597 : /*out*/ "=r" (_res) \
6598 : /*in*/ "r" (&_argvec[0]) \
6599 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6600 ); \
6601 lval = (__typeof__(lval)) (long)_res; \
6602 } while (0)
6604 #endif /* PLAT_mips64_linux */
6606 /* ------------------------------------------------------------------ */
6607 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6608 /* */
6609 /* ------------------------------------------------------------------ */
6611 /* Some request codes. There are many more of these, but most are not
6612 exposed to end-user view. These are the public ones, all of the
6613 form 0x1000 + small_number.
6615 Core ones are in the range 0x00000000--0x0000ffff. The non-public
6616 ones start at 0x2000. */
6619 /* These macros are used by tools -- they must be public, but don't
6620 embed them into other programs. */
/* Pack a tool's two-character code into the top 16 bits of a request
   number: 'a' goes in bits 31..24, 'b' in bits 23..16. */
6621 #define VG_USERREQ_TOOL_BASE(a,b) \
6622 ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
/* True iff request value v belongs to the tool identified by (a,b),
   i.e. its top 16 bits match that tool's base. */
6623 #define VG_IS_TOOL_USERREQ(a, b, v) \
6624 (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
6626 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
6627 This enum comprises an ABI exported by Valgrind to programs
6628 which use client requests. DO NOT CHANGE THE NUMERIC VALUES OF THESE
6629 ENTRIES, NOR DELETE ANY -- add new ones at the end of the most
6630 relevant group. */
/* Public client-request codes, grouped by function (0x1000 + small
   number).  Values are part of the exported ABI and must never be
   renumbered or removed -- see the ABIWARNING comment above. */
6631 typedef
6632 enum { VG_USERREQ__RUNNING_ON_VALGRIND = 0x1001,
6633 VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,
6635 /* These allow any function to be called from the simulated
6636 CPU but run on the real CPU. Nb: the first arg passed to
6637 the function is always the ThreadId of the running
6638 thread! So CLIENT_CALL0 actually requires a 1 arg
6639 function, etc. */
6640 VG_USERREQ__CLIENT_CALL0 = 0x1101,
6641 VG_USERREQ__CLIENT_CALL1 = 0x1102,
6642 VG_USERREQ__CLIENT_CALL2 = 0x1103,
6643 VG_USERREQ__CLIENT_CALL3 = 0x1104,
6645 /* Can be useful in regression testing suites -- eg. can
6646 send Valgrind's output to /dev/null and still count
6647 errors. */
6648 VG_USERREQ__COUNT_ERRORS = 0x1201,
6650 /* Allows the client program and/or gdbserver to execute a monitor
6651 command. */
6652 VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,
6654 /* Allows the client program to change a dynamic command line
6655 option. */
6656 VG_USERREQ__CLO_CHANGE = 0x1203,
6658 /* These are useful and can be interpreted by any tool that
6659 tracks malloc() et al, by using vg_replace_malloc.c. */
6660 VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
/* 0x130b sits out of numeric sequence: new entries are appended to a
   group because existing values are ABI-frozen (see ABIWARNING). */
6661 VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
6662 VG_USERREQ__FREELIKE_BLOCK = 0x1302,
6663 /* Memory pool support. */
6664 VG_USERREQ__CREATE_MEMPOOL = 0x1303,
6665 VG_USERREQ__DESTROY_MEMPOOL = 0x1304,
6666 VG_USERREQ__MEMPOOL_ALLOC = 0x1305,
6667 VG_USERREQ__MEMPOOL_FREE = 0x1306,
6668 VG_USERREQ__MEMPOOL_TRIM = 0x1307,
6669 VG_USERREQ__MOVE_MEMPOOL = 0x1308,
6670 VG_USERREQ__MEMPOOL_CHANGE = 0x1309,
6671 VG_USERREQ__MEMPOOL_EXISTS = 0x130a,
6673 /* Allow printfs to valgrind log. */
6674 /* The first two pass the va_list argument by value, which
6675 assumes it is the same size as or smaller than a UWord,
6676 which generally isn't the case. Hence are deprecated.
6677 The second two pass the vargs by reference and so are
6678 immune to this problem. */
6679 /* both :: char* fmt, va_list vargs (DEPRECATED) */
6680 VG_USERREQ__PRINTF = 0x1401,
6681 VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
6682 /* both :: char* fmt, va_list* vargs */
6683 VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
6684 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,
6686 /* Stack support. */
6687 VG_USERREQ__STACK_REGISTER = 0x1501,
6688 VG_USERREQ__STACK_DEREGISTER = 0x1502,
6689 VG_USERREQ__STACK_CHANGE = 0x1503,
6691 /* Wine support */
6692 VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,
6694 /* Querying of debug info. */
6695 VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,
6697 /* Disable/enable error reporting level. Takes a single
6698 Word arg which is the delta to this thread's error
6699 disablement indicator. Hence 1 disables or further
6700 disables errors, and -1 moves back towards enablement.
6701 Other values are not allowed. */
6702 VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,
6704 /* Some requests used for Valgrind internal, such as
6705 self-test or self-hosting. */
6706 /* Initialise IR injection */
6707 VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
6708 /* Used by Inner Valgrind to inform Outer Valgrind where to
6709 find the list of inner guest threads */
6710 VG_USERREQ__INNER_THREADS = 0x1902
6711 } Vg_ClientRequest;
/* Define __extension__ away on non-GNU compilers so that GNU-isms
   below still compile cleanly. */
6713 #if !defined(__GNUC__)
6714 # define __extension__ /* */
6715 #endif
6718 /* Returns the number of Valgrinds this code is running under. That
6719 is, 0 if running natively, 1 if running under Valgrind, 2 if
6720 running under Valgrind which is running under another Valgrind,
6721 etc. */
/* Expression macro; the first argument (0) is the value produced when
   not running under Valgrind. */
6722 #define RUNNING_ON_VALGRIND \
6723 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */, \
6724 VG_USERREQ__RUNNING_ON_VALGRIND, \
6725 0, 0, 0, 0, 0) \
6728 /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
6729 _qzz_len - 1]. Useful if you are debugging a JITter or some such,
6730 since it provides a way to make sure valgrind will retranslate the
6731 invalidated area. Returns no value. */
6732 #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
6733 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
6734 _qzz_addr, _qzz_len, 0, 0, 0)
/* Inner/outer self-hosting support: tell the outer Valgrind where the
   inner guest thread list lives (see VG_USERREQ__INNER_THREADS). */
6736 #define VALGRIND_INNER_THREADS(_qzz_addr) \
6737 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS, \
6738 _qzz_addr, 0, 0, 0, 0)
6741 /* These requests are for getting Valgrind itself to print something.
6742 Possibly with a backtrace. This is a really ugly hack. The return value
6743 is the number of characters printed, excluding the "**<pid>** " part at the
6744 start and the backtrace (if present). */
/* NOTE(review): '&&' binds tighter than '||', so this condition reads
   as GNUC, or (Intel compiler and not MSVC) -- confirm that grouping
   is intended. */
6746 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6747 /* Modern GCC will optimize the static routine out if unused,
6748 and unused attribute will shut down warnings about it. */
6749 static int VALGRIND_PRINTF(const char *format, ...)
6750 __attribute__((format(__printf__, 1, 2), __unused__));
6751 #endif
/* printf-style message to the Valgrind log via a client request.
   Returns the character count reported by Valgrind, or 0 when built
   with NVALGRIND or when running natively (client request default). */
6752 static int
6753 #if defined(_MSC_VER)
6754 __inline
6755 #endif
6756 VALGRIND_PRINTF(const char *format, ...)
/* NOTE(review): the function's opening brace appears to have been
   lost from this copy of the file -- restore when merging. */
6758 #if defined(NVALGRIND)
6759 (void)format;
6760 return 0;
6761 #else /* NVALGRIND */
6762 #if defined(_MSC_VER) || defined(__MINGW64__)
6763 uintptr_t _qzz_res;
6764 #else
6765 unsigned long _qzz_res;
6766 #endif
6767 va_list vargs;
6768 va_start(vargs, format);
/* The va_list is passed by reference (see the VALIST_BY_REF comment
   in the request-code enum). */
6769 #if defined(_MSC_VER) || defined(__MINGW64__)
6770 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6771 VG_USERREQ__PRINTF_VALIST_BY_REF,
6772 (uintptr_t)format,
6773 (uintptr_t)&vargs,
6774 0, 0, 0);
6775 #else
6776 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6777 VG_USERREQ__PRINTF_VALIST_BY_REF,
6778 (unsigned long)format,
6779 (unsigned long)&vargs,
6780 0, 0, 0);
6781 #endif
6782 va_end(vargs);
6783 return (int)_qzz_res;
6784 #endif /* NVALGRIND */
/* NOTE(review): same precedence subtlety as the VALGRIND_PRINTF guard:
   this reads as GNUC, or (Intel compiler and not MSVC). */
6787 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6788 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6789 __attribute__((format(__printf__, 1, 2), __unused__));
6790 #endif
/* Like VALGRIND_PRINTF, but Valgrind also prints a backtrace of the
   calling thread after the message.  Returns the character count, or
   0 under NVALGRIND / when running natively. */
6791 static int
6792 #if defined(_MSC_VER)
6793 __inline
6794 #endif
6795 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
/* NOTE(review): opening brace appears lost from this copy -- restore
   when merging. */
6797 #if defined(NVALGRIND)
6798 (void)format;
6799 return 0;
6800 #else /* NVALGRIND */
6801 #if defined(_MSC_VER) || defined(__MINGW64__)
6802 uintptr_t _qzz_res;
6803 #else
6804 unsigned long _qzz_res;
6805 #endif
6806 va_list vargs;
6807 va_start(vargs, format);
6808 #if defined(_MSC_VER) || defined(__MINGW64__)
6809 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6810 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6811 (uintptr_t)format,
6812 (uintptr_t)&vargs,
6813 0, 0, 0);
6814 #else
6815 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6816 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6817 (unsigned long)format,
6818 (unsigned long)&vargs,
6819 0, 0, 0);
6820 #endif
6821 va_end(vargs);
6822 return (int)_qzz_res;
6823 #endif /* NVALGRIND */
6827 /* These requests allow control to move from the simulated CPU to the
6828 real CPU, calling an arbitrary function.
6830 Note that the current ThreadId is inserted as the first argument.
6831 So this call:
6833 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
6835 requires f to have this signature:
6837 Word f(Word tid, Word arg1, Word arg2)
6839 where "Word" is a word-sized type.
6841 Note that these client requests are not entirely reliable. For example,
6842 if you call a function with them that subsequently calls printf(),
6843 there's a high chance Valgrind will crash. Generally, your prospects of
6844 these working are made higher if the called function does not refer to
6845 any global variables, and does not refer to any libc or other functions
6846 (printf et al). Any kind of entanglement with libc or dynamic linking is
6847 likely to have a bad outcome, for tricky reasons which we've grappled
6848 with a lot in the past. */
/* Run _qyy_fn on the real CPU.  Valgrind inserts the current ThreadId
   as the function's first argument (see the comment above), so CALLn
   needs an (n+1)-argument function.  Each macro is an expression that
   evaluates to the function's result, or 0 when not under Valgrind. */
6850 #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
6851 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6852 VG_USERREQ__CLIENT_CALL0, \
6853 _qyy_fn, \
6854 0, 0, 0, 0)
6856 #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
6857 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6858 VG_USERREQ__CLIENT_CALL1, \
6859 _qyy_fn, \
6860 _qyy_arg1, 0, 0, 0)
6862 #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
6863 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6864 VG_USERREQ__CLIENT_CALL2, \
6865 _qyy_fn, \
6866 _qyy_arg1, _qyy_arg2, 0, 0)
6868 #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
6869 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6870 VG_USERREQ__CLIENT_CALL3, \
6871 _qyy_fn, \
6872 _qyy_arg1, _qyy_arg2, \
6873 _qyy_arg3, 0)
6876 /* Counts the number of errors that have been recorded by a tool. Nb:
6877 the tool must record the errors with VG_(maybe_record_error)() or
6878 VG_(unique_error)() for them to be counted. */
/* Expression macro yielding an unsigned count; 0 when running
   natively (the client-request default). */
6879 #define VALGRIND_COUNT_ERRORS \
6880 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
6881 0 /* default return */, \
6882 VG_USERREQ__COUNT_ERRORS, \
6883 0, 0, 0, 0, 0)
6885 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
6886 when heap blocks are allocated in order to give accurate results. This
6887 happens automatically for the standard allocator functions such as
6888 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
6889 delete[], etc.
6891 But if your program uses a custom allocator, this doesn't automatically
6892 happen, and Valgrind will not do as well. For example, if you allocate
6893 superblocks with mmap() and then allocate chunks of the superblocks, all
6894 Valgrind's observations will be at the mmap() level and it won't know that
6895 the chunks should be considered separate entities. In Memcheck's case,
6896 that means you probably won't get heap block overrun detection (because
6897 there won't be redzones marked as unaddressable) and you definitely won't
6898 get any leak detection.
6900 The following client requests allow a custom allocator to be annotated so
6901 that it can be handled accurately by Valgrind.
6903 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
6904 by a malloc()-like function. For Memcheck (an illustrative case), this
6905 does two things:
6907 - It records that the block has been allocated. This means any addresses
6908 within the block mentioned in error messages will be
6909 identified as belonging to the block. It also means that if the block
6910 isn't freed it will be detected by the leak checker.
6912 - It marks the block as being addressable and undefined (if 'is_zeroed' is
6913 not set), or addressable and defined (if 'is_zeroed' is set). This
6914 controls how accesses to the block by the program are handled.
6916 'addr' is the start of the usable block (ie. after any
6917 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
6918 can apply redzones -- these are blocks of padding at the start and end of
6919 each block. Adding redzones is recommended as it makes it much more likely
6920 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
6921 zeroed (or filled with another predictable value), as is the case for
6922 calloc().
6924 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
6925 heap block -- that will be used by the client program -- is allocated.
6926 It's best to put it at the outermost level of the allocator if possible;
6927 for example, if you have a function my_alloc() which calls
6928 internal_alloc(), and the client request is put inside internal_alloc(),
6929 stack traces relating to the heap block will contain entries for both
6930 my_alloc() and internal_alloc(), which is probably not what you want.
6932 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
6933 custom blocks from within a heap block, B, that has been allocated with
6934 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
6935 -- the custom blocks will take precedence.
6937 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
6938 Memcheck, it does two things:
6940 - It records that the block has been deallocated. This assumes that the
6941 block was annotated as having been allocated via
6942 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6944 - It marks the block as being unaddressable.
6946 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
6947 heap block is deallocated.
6949 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
6950 Memcheck, it does four things:
6952 - It records that the size of a block has been changed. This assumes that
6953 the block was annotated as having been allocated via
6954 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6956 - If the block shrunk, it marks the freed memory as being unaddressable.
6958 - If the block grew, it marks the new area as undefined and defines a red
6959 zone past the end of the new block.
6961 - The V-bits of the overlap between the old and the new block are preserved.
6963 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
6964 and before deallocation of the old block.
6966 In many cases, these three client requests will not be enough to get your
6967 allocator working well with Memcheck. More specifically, if your allocator
6968 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
6969 will be necessary to mark the memory as addressable just before the zeroing
6970 occurs, otherwise you'll get a lot of invalid write errors. For example,
6971 you'll need to do this if your allocator recycles freed blocks, but it
6972 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
6973 Alternatively, if your allocator reuses freed blocks for allocator-internal
6974 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
6976 Really, what's happening is a blurring of the lines between the client
6977 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
6978 memory should be considered unaddressable to the client program, but the
6979 allocator knows more than the rest of the client program and so may be able
6980 to safely access it. Extra client requests are necessary for Valgrind to
6981 understand the distinction between the allocator and the rest of the
6982 program.
6984 Ignored if addr == 0. */
/* Announce a custom allocation; see the long explanatory comment
   above for the meaning of addr/sizeB/rzB/is_zeroed. */
6986 #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
6987 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
6988 addr, sizeB, rzB, is_zeroed, 0)
6990 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
6991 Ignored if addr == 0. */
6993 #define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
6994 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
6995 addr, oldSizeB, newSizeB, rzB, 0)
6997 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
6998 Ignored if addr == 0. */
7000 #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
7001 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, \
7002 addr, rzB, 0, 0, 0)
7004 /* Create a memory pool. */
7005 #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
7006 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
7007 pool, rzB, is_zeroed, 0, 0)
7009 /* Create a memory pool with some flags specifying extended behaviour.
7010 When flags is zero, the behaviour is identical to VALGRIND_CREATE_MEMPOOL.
7012 The flag VALGRIND_MEMPOOL_METAPOOL specifies that the pieces of memory
7013 associated with the pool using VALGRIND_MEMPOOL_ALLOC will be used
7014 by the application as superblocks to dole out MALLOC_LIKE blocks using
7015 VALGRIND_MALLOCLIKE_BLOCK. In other words, a meta pool is a "2 levels"
7016 pool : first level is the blocks described by VALGRIND_MEMPOOL_ALLOC.
7017 The second level blocks are described using VALGRIND_MALLOCLIKE_BLOCK.
7018 Note that the association between the pool and the second level blocks
7019 is implicit : second level blocks will be located inside first level
7020 blocks. It is necessary to use the VALGRIND_MEMPOOL_METAPOOL flag
7021 for such 2 levels pools, as otherwise valgrind will detect overlapping
7022 memory blocks, and will abort execution (e.g. during leak search).
7024 Such a meta pool can also be marked as an 'auto free' pool using the flag
7025 VALGRIND_MEMPOOL_AUTO_FREE, which must be OR-ed together with the
7026 VALGRIND_MEMPOOL_METAPOOL. For an 'auto free' pool, VALGRIND_MEMPOOL_FREE
7027 will automatically free the second level blocks that are contained
7028 inside the first level block freed with VALGRIND_MEMPOOL_FREE.
7029 In other words, calling VALGRIND_MEMPOOL_FREE will cause implicit calls
7030 to VALGRIND_FREELIKE_BLOCK for all the second level blocks included
7031 in the first level block.
7032 Note: it is an error to use the VALGRIND_MEMPOOL_AUTO_FREE flag
7033 without the VALGRIND_MEMPOOL_METAPOOL flag. */
/* Bit flags for the 'flags' argument below (may be OR-ed together,
   subject to the constraint described above). */
7035 #define VALGRIND_MEMPOOL_AUTO_FREE 1
7036 #define VALGRIND_MEMPOOL_METAPOOL 2
7037 #define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags) \
7038 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
7039 pool, rzB, is_zeroed, flags, 0)
7041 /* Destroy a memory pool. */
7042 #define VALGRIND_DESTROY_MEMPOOL(pool) \
7043 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL, \
7044 pool, 0, 0, 0, 0)
7046 /* Associate a piece of memory with a memory pool. */
7047 #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
7048 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, \
7049 pool, addr, size, 0, 0)
7051 /* Disassociate a piece of memory from a memory pool. */
7052 #define VALGRIND_MEMPOOL_FREE(pool, addr) \
7053 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, \
7054 pool, addr, 0, 0, 0)
7056 /* Disassociate any pieces outside a particular range. */
7057 #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
7058 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, \
7059 pool, addr, size, 0, 0)
/* Tell the tool the pool anchored at poolA has moved to poolB (the
   pool itself, not one of its pieces, has been relocated). */
7062 #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
7063 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL, \
7064 poolA, poolB, 0, 0, 0)
7066 /* Resize and/or move a piece associated with a memory pool. */
7067 #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
7068 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, \
7069 pool, addrA, addrB, size, 0)
7071 /* Return 1 if a mempool exists, else 0. */
7072 #define VALGRIND_MEMPOOL_EXISTS(pool) \
7073 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7074 VG_USERREQ__MEMPOOL_EXISTS, \
7075 pool, 0, 0, 0, 0)
7077 /* Mark a piece of memory as being a stack. Returns a stack id.
7078 start is the lowest addressable stack byte, end is the highest
7079 addressable stack byte. */
/* The returned id is the handle to pass to STACK_DEREGISTER and
   STACK_CHANGE below. */
7080 #define VALGRIND_STACK_REGISTER(start, end) \
7081 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7082 VG_USERREQ__STACK_REGISTER, \
7083 start, end, 0, 0, 0)
7085 /* Unmark the piece of memory associated with a stack id as being a
7086 stack. */
7087 #define VALGRIND_STACK_DEREGISTER(id) \
7088 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
7089 id, 0, 0, 0, 0)
7091 /* Change the start and end address of the stack id.
7092 start is the new lowest addressable stack byte, end is the new highest
7093 addressable stack byte. */
7094 #define VALGRIND_STACK_CHANGE(id, start, end) \
7095 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE, \
7096 id, start, end, 0, 0)
7098 /* Load PDB debug info for Wine PE image_map. */
7099 #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
7100 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
7101 fd, ptr, total_size, delta, 0)
7103 /* Map a code address to a source file name and line number. buf64
7104 must point to a 64-byte buffer in the caller's address space. The
7105 result will be dumped in there and is guaranteed to be zero
7106 terminated. If no info is found, the first byte is set to zero. */
/* Expression macro; evaluates to 0 when running natively, so buf64
   is untouched in that case. */
7107 #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
7108 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7109 VG_USERREQ__MAP_IP_TO_SRCLOC, \
7110 addr, buf64, 0, 0, 0)
7112 /* Disable error reporting for this thread. Behaves in a stack like
7113 way, so you can safely call this multiple times provided that
7114 VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
7115 to re-enable reporting. The first call of this macro disables
7116 reporting. Subsequent calls have no effect except to increase the
7117 number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
7118 reporting. Child threads do not inherit this setting from their
7119 parents -- they are always created with reporting enabled. */
/* Both macros adjust the same per-thread disablement counter: +1
   here, -1 below (see VG_USERREQ__CHANGE_ERR_DISABLEMENT). */
7120 #define VALGRIND_DISABLE_ERROR_REPORTING \
7121 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
7122 1, 0, 0, 0, 0)
7124 /* Re-enable error reporting, as per comments on
7125 VALGRIND_DISABLE_ERROR_REPORTING. */
7126 #define VALGRIND_ENABLE_ERROR_REPORTING \
7127 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
7128 -1, 0, 0, 0, 0)
7130 /* Execute a monitor command from the client program.
7131 If a connection is opened with GDB, the output will be sent
7132 according to the output mode set for vgdb.
7133 If no connection is opened, output will go to the log output.
7134 Returns 1 if command not recognised, 0 otherwise. */
/* Expression macro (note EXPR, not STMT): its value is the 0/1
   result described above, and 0 when running natively. */
7135 #define VALGRIND_MONITOR_COMMAND(command) \
7136 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
7137 command, 0, 0, 0, 0)
7140 /* Change the value of a dynamic command line option.
7141 Note that unknown or not dynamically changeable options
7142 will cause a warning message to be output. */
7143 #define VALGRIND_CLO_CHANGE(option) \
7144 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CLO_CHANGE, \
7145 option, 0, 0, 0, 0)
/* Tidy up: remove the internal per-platform selection macros so they
   do not leak into code that includes this header. */
7148 #undef PLAT_x86_darwin
7149 #undef PLAT_amd64_darwin
7150 #undef PLAT_x86_win32
7151 #undef PLAT_amd64_win64
7152 #undef PLAT_x86_linux
7153 #undef PLAT_amd64_linux
7154 #undef PLAT_ppc32_linux
7155 #undef PLAT_ppc64be_linux
7156 #undef PLAT_ppc64le_linux
7157 #undef PLAT_arm_linux
7158 #undef PLAT_s390x_linux
7159 #undef PLAT_mips32_linux
7160 #undef PLAT_mips64_linux
7161 #undef PLAT_nanomips_linux
7162 #undef PLAT_x86_solaris
7163 #undef PLAT_amd64_solaris
7165 #endif /* __VALGRIND_H */