xen: support for XEN_DOMCTL_iomem_permission
[valgrind.git] / include / valgrind.h
blob4baf855f4cbcf4ff96d797bfcd5436d78a32dd91
1 /* -*- c -*-
2 ----------------------------------------------------------------
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
10 ----------------------------------------------------------------
12 This file is part of Valgrind, a dynamic binary instrumentation
13 framework.
15 Copyright (C) 2000-2013 Julian Seward. All rights reserved.
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
19 are met:
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
34 permission.
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
48 ----------------------------------------------------------------
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
55 ----------------------------------------------------------------
59 /* This file is for inclusion into client (your!) code.
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on valgrind, each client request
67 consumes very few (eg. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
73 #ifndef __VALGRIND_H
74 #define __VALGRIND_H
77 /* ------------------------------------------------------------------ */
78 /* VERSION NUMBER OF VALGRIND */
79 /* ------------------------------------------------------------------ */
81 /* Specify Valgrind's version number, so that user code can
82 conditionally compile based on our version number. Note that these
83 were introduced at version 3.6 and so do not exist in version 3.5
84 or earlier. The recommended way to use them to check for "version
85 X.Y or later" is (eg)
87 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88 && (__VALGRIND_MAJOR__ > 3 \
|| (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
91 #define __VALGRIND_MAJOR__ 3
92 #define __VALGRIND_MINOR__ 10
95 #include <stdarg.h>
97 /* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead
99 use "__asm__"). */
101 /* Derive some tags indicating what the target platform is. Note
102 that in this file we're using the compiler's CPP symbols for
103 identifying architectures, which are different to the ones we use
104 within the rest of Valgrind. Note, __powerpc__ is active for both
105 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106 latter (on Linux, that is).
108 Misc note: how to find out what's predefined in gcc by default:
gcc -Wp,-dM somefile.c
*/
111 #undef PLAT_x86_darwin
112 #undef PLAT_amd64_darwin
113 #undef PLAT_x86_win32
114 #undef PLAT_amd64_win64
115 #undef PLAT_x86_linux
116 #undef PLAT_amd64_linux
117 #undef PLAT_ppc32_linux
118 #undef PLAT_ppc64be_linux
119 #undef PLAT_ppc64le_linux
120 #undef PLAT_arm_linux
121 #undef PLAT_arm64_linux
122 #undef PLAT_s390x_linux
123 #undef PLAT_mips32_linux
124 #undef PLAT_mips64_linux
125 #undef PLAT_tilegx_linux
128 #if defined(__APPLE__) && defined(__i386__)
129 # define PLAT_x86_darwin 1
130 #elif defined(__APPLE__) && defined(__x86_64__)
131 # define PLAT_amd64_darwin 1
132 #elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
133 || defined(__CYGWIN32__) \
134 || (defined(_WIN32) && defined(_M_IX86))
135 # define PLAT_x86_win32 1
136 #elif defined(__MINGW64__) \
137 || (defined(_WIN64) && defined(_M_X64))
138 # define PLAT_amd64_win64 1
139 #elif defined(__linux__) && defined(__i386__)
140 # define PLAT_x86_linux 1
141 #elif defined(__linux__) && defined(__x86_64__)
142 # define PLAT_amd64_linux 1
143 #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
144 # define PLAT_ppc32_linux 1
145 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
146 /* Big Endian uses ELF version 1 */
147 # define PLAT_ppc64be_linux 1
148 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
149 /* Little Endian uses ELF version 2 */
150 # define PLAT_ppc64le_linux 1
151 #elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
152 # define PLAT_arm_linux 1
153 #elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
154 # define PLAT_arm64_linux 1
155 #elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
156 # define PLAT_s390x_linux 1
157 #elif defined(__linux__) && defined(__mips__) && (__mips==64)
158 # define PLAT_mips64_linux 1
159 #elif defined(__linux__) && defined(__mips__) && (__mips!=64)
160 # define PLAT_mips32_linux 1
161 #elif defined(__linux__) && defined(__tilegx__)
162 # define PLAT_tilegx_linux 1
163 #else
164 /* If we're not compiling for our target platform, don't generate
165 any inline asms. */
166 # if !defined(NVALGRIND)
167 # define NVALGRIND 1
168 # endif
169 #endif
172 /* ------------------------------------------------------------------ */
173 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
174 /* in here of use to end-users -- skip to the next section. */
175 /* ------------------------------------------------------------------ */
/*
 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
 * request. Accepts both pointers and integers as arguments.
 *
 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
 * client request that does not return a value.
 *
 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
 * client request and whose value equals the client request result. Accepts
 * both pointers and integers as arguments. Note that such calls are not
 * necessarily pure functions -- they may have side effects.
 */
/* VALGRIND_DO_CLIENT_REQUEST(): invoke a Valgrind client request and
   assign its result to _zzq_rlval.  Thin statement wrapper around
   VALGRIND_DO_CLIENT_REQUEST_EXPR(); _zzq_default is the value produced
   when the program runs on a real CPU rather than under Valgrind. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
/* VALGRIND_DO_CLIENT_REQUEST_STMT(): invoke a Valgrind client request
   whose result is not needed; the expression value is discarded. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                                        _zzq_arg2,  _zzq_arg3,          \
                                        _zzq_arg4, _zzq_arg5)           \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                  (_zzq_request), (_zzq_arg1), (_zzq_arg2),             \
                  (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
203 #if defined(NVALGRIND)
205 /* Define NVALGRIND to completely remove the Valgrind magic sequence
206 from the compiled code (analogous to NDEBUG's effects on
207 assert()) */
/* With NVALGRIND defined, client requests compile away entirely
   (analogous to NDEBUG's effect on assert()): the expression simply
   yields the caller-supplied default and emits no magic sequence. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                                \
        _zzq_default, _zzq_request,                                     \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)          \
      (_zzq_default)
213 #else /* ! NVALGRIND */
215 /* The following defines the magic code sequences which the JITter
216 spots and handles magically. Don't look too closely at them as
217 they will rot your brain.
219 The assembly code sequences for all architectures is in this one
220 file. This is because this file must be stand-alone, and we don't
221 want to have multiple files.
223 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
224 value gets put in the return slot, so that everything works when
225 this is executed not under Valgrind. Args are passed in a memory
226 block, and so there's no intrinsic limit to the number that could
227 be passed, but it's currently five.
229 The macro args are:
230 _zzq_rlval result lvalue
231 _zzq_default default value (result returned when running on real CPU)
232 _zzq_request request code
233 _zzq_arg1..5 request params
235 The other two macros are used to support function wrapping, and are
236 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
237 guest's NRADDR pseudo-register and whatever other information is
238 needed to safely run the call original from the wrapper: on
239 ppc64-linux, the R2 value at the divert point is also needed. This
240 information is abstracted into a user-visible type, OrigFn.
242 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
243 guest, but guarantees that the branch instruction will not be
244 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
245 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
246 complete inline asm, since it needs to be combined with more magic
inline asm stuff to be useful.
*/
250 /* ------------------------- x86-{linux,darwin} ---------------- */
252 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
253 || (defined(PLAT_x86_win32) && defined(__GNUC__))
255 typedef
256 struct {
257 unsigned int nraddr; /* where's the code? */
259 OrigFn;
/* Magic preamble Valgrind's JIT recognises as introducing a client
   request.  The four rotate amounts sum to 64 (0 mod 32), so on a real
   CPU %edi is restored to its original value. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3, %%edi ; roll $13, %%edi\n\t"       \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"
265 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
266 _zzq_default, _zzq_request, \
267 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
268 __extension__ \
269 ({volatile unsigned int _zzq_args[6]; \
270 volatile unsigned int _zzq_result; \
271 _zzq_args[0] = (unsigned int)(_zzq_request); \
272 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
273 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
274 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
275 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
276 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
277 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
278 /* %EDX = client_request ( %EAX ) */ \
279 "xchgl %%ebx,%%ebx" \
280 : "=d" (_zzq_result) \
281 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
282 : "cc", "memory" \
283 ); \
284 _zzq_result; \
287 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
288 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
289 volatile unsigned int __addr; \
290 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
291 /* %EAX = guest_NRADDR */ \
292 "xchgl %%ecx,%%ecx" \
293 : "=a" (__addr) \
295 : "cc", "memory" \
296 ); \
297 _zzq_orig->nraddr = __addr; \
/* Asm text fragment (not a complete asm statement): tells Valgrind to
   call *%EAX without applying function redirection.  Must be combined
   into a larger inline-asm block by the caller. */
#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

/* Marker requesting IR injection at this point; the sequence executes
   as a no-op on a real CPU. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                   "xchgl %%edi,%%edi\n\t"                        \
                   : : : "cc", "memory"                           \
                  );                                              \
 } while (0)
313 #endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
315 /* ------------------------- x86-Win32 ------------------------- */
317 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
319 typedef
320 struct {
321 unsigned int nraddr; /* where's the code? */
323 OrigFn;
325 #if defined(_MSC_VER)
327 #define __SPECIAL_INSTRUCTION_PREAMBLE \
328 __asm rol edi, 3 __asm rol edi, 13 \
329 __asm rol edi, 29 __asm rol edi, 19
331 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
332 _zzq_default, _zzq_request, \
333 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
334 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
335 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
336 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
337 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
339 static __inline uintptr_t
340 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
341 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
342 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
343 uintptr_t _zzq_arg5)
345 volatile uintptr_t _zzq_args[6];
346 volatile unsigned int _zzq_result;
347 _zzq_args[0] = (uintptr_t)(_zzq_request);
348 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
349 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
350 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
351 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
352 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
353 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
354 __SPECIAL_INSTRUCTION_PREAMBLE
355 /* %EDX = client_request ( %EAX ) */
356 __asm xchg ebx,ebx
357 __asm mov _zzq_result, edx
359 return _zzq_result;
362 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
363 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
364 volatile unsigned int __addr; \
365 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
366 /* %EAX = guest_NRADDR */ \
367 __asm xchg ecx,ecx \
368 __asm mov __addr, eax \
370 _zzq_orig->nraddr = __addr; \
373 #define VALGRIND_CALL_NOREDIR_EAX ERROR
375 #define VALGRIND_VEX_INJECT_IR() \
376 do { \
377 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
378 __asm xchg edi,edi \
380 } while (0)
382 #else
383 #error Unsupported compiler.
384 #endif
386 #endif /* PLAT_x86_win32 */
388 /* ------------------------ amd64-{linux,darwin} --------------- */
390 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
391 || (defined(PLAT_amd64_win64) && defined(__GNUC__))
393 typedef
394 struct {
395 unsigned long int nraddr; /* where's the code? */
397 OrigFn;
/* Magic preamble Valgrind's JIT recognises as introducing a client
   request.  Rotate amounts sum to 128 (0 mod 64), so on a real CPU
   %rdi is restored to its original value. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3, %%rdi ; rolq $13, %%rdi\n\t"       \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
403 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
404 _zzq_default, _zzq_request, \
405 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
406 __extension__ \
407 ({ volatile unsigned long int _zzq_args[6]; \
408 volatile unsigned long int _zzq_result; \
409 _zzq_args[0] = (unsigned long int)(_zzq_request); \
410 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
411 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
412 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
413 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
414 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
415 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
416 /* %RDX = client_request ( %RAX ) */ \
417 "xchgq %%rbx,%%rbx" \
418 : "=d" (_zzq_result) \
419 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
420 : "cc", "memory" \
421 ); \
422 _zzq_result; \
425 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
426 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
427 volatile unsigned long int __addr; \
428 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
429 /* %RAX = guest_NRADDR */ \
430 "xchgq %%rcx,%%rcx" \
431 : "=a" (__addr) \
433 : "cc", "memory" \
434 ); \
435 _zzq_orig->nraddr = __addr; \
/* Asm text fragment (not a complete asm statement): tells Valgrind to
   call *%RAX without applying function redirection. */
#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

/* Marker requesting IR injection at this point; the sequence executes
   as a no-op on a real CPU. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                   "xchgq %%rdi,%%rdi\n\t"                        \
                   : : : "cc", "memory"                           \
                  );                                              \
 } while (0)
451 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
453 /* ------------------------- amd64-Win64 ------------------------- */
455 #if defined(PLAT_amd64_win64) && !defined(__GNUC__)
457 #error Unsupported compiler.
459 #endif /* PLAT_amd64_win64 */
461 /* ------------------------ ppc32-linux ------------------------ */
463 #if defined(PLAT_ppc32_linux)
465 typedef
466 struct {
467 unsigned int nraddr; /* where's the code? */
469 OrigFn;
/* Magic preamble Valgrind's JIT recognises as introducing a client
   request.  Full-mask rotations of r0 summing to 64 (0 mod 32), so a
   real CPU leaves r0 unchanged. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                    "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t"  \
                    "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"
475 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
476 _zzq_default, _zzq_request, \
477 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
479 __extension__ \
480 ({ unsigned int _zzq_args[6]; \
481 unsigned int _zzq_result; \
482 unsigned int* _zzq_ptr; \
483 _zzq_args[0] = (unsigned int)(_zzq_request); \
484 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
485 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
486 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
487 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
488 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
489 _zzq_ptr = _zzq_args; \
490 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
491 "mr 4,%2\n\t" /*ptr*/ \
492 __SPECIAL_INSTRUCTION_PREAMBLE \
493 /* %R3 = client_request ( %R4 ) */ \
494 "or 1,1,1\n\t" \
495 "mr %0,3" /*result*/ \
496 : "=b" (_zzq_result) \
497 : "b" (_zzq_default), "b" (_zzq_ptr) \
498 : "cc", "memory", "r3", "r4"); \
499 _zzq_result; \
502 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
503 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
504 unsigned int __addr; \
505 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
506 /* %R3 = guest_NRADDR */ \
507 "or 2,2,2\n\t" \
508 "mr %0,3" \
509 : "=b" (__addr) \
511 : "cc", "memory", "r3" \
512 ); \
513 _zzq_orig->nraddr = __addr; \
/* Asm text fragment (not a complete asm statement): tells Valgrind to
   branch-and-link through R11 without applying function redirection. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

/* Marker requesting IR injection at this point; the sequence executes
   as a no-op on a real CPU. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                   "or 5,5,5\n\t"                                 \
                  );                                              \
 } while (0)
528 #endif /* PLAT_ppc32_linux */
530 /* ------------------------ ppc64-linux ------------------------ */
532 #if defined(PLAT_ppc64be_linux)
534 typedef
535 struct {
536 unsigned long int nraddr; /* where's the code? */
537 unsigned long int r2; /* what tocptr do we need? */
539 OrigFn;
/* Magic preamble Valgrind's JIT recognises as introducing a client
   request.  Rotations of r0 summing to 128 (0 mod 64), so a real CPU
   leaves r0 unchanged. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
545 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
546 _zzq_default, _zzq_request, \
547 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
549 __extension__ \
550 ({ unsigned long int _zzq_args[6]; \
551 unsigned long int _zzq_result; \
552 unsigned long int* _zzq_ptr; \
553 _zzq_args[0] = (unsigned long int)(_zzq_request); \
554 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
555 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
556 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
557 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
558 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
559 _zzq_ptr = _zzq_args; \
560 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
561 "mr 4,%2\n\t" /*ptr*/ \
562 __SPECIAL_INSTRUCTION_PREAMBLE \
563 /* %R3 = client_request ( %R4 ) */ \
564 "or 1,1,1\n\t" \
565 "mr %0,3" /*result*/ \
566 : "=b" (_zzq_result) \
567 : "b" (_zzq_default), "b" (_zzq_ptr) \
568 : "cc", "memory", "r3", "r4"); \
569 _zzq_result; \
572 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
573 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
574 unsigned long int __addr; \
575 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
576 /* %R3 = guest_NRADDR */ \
577 "or 2,2,2\n\t" \
578 "mr %0,3" \
579 : "=b" (__addr) \
581 : "cc", "memory", "r3" \
582 ); \
583 _zzq_orig->nraddr = __addr; \
584 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
585 /* %R3 = guest_NRADDR_GPR2 */ \
586 "or 4,4,4\n\t" \
587 "mr %0,3" \
588 : "=b" (__addr) \
590 : "cc", "memory", "r3" \
591 ); \
592 _zzq_orig->r2 = __addr; \
/* Asm text fragment (not a complete asm statement): tells Valgrind to
   branch-and-link through R11 without applying function redirection. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

/* Marker requesting IR injection at this point; the sequence executes
   as a no-op on a real CPU. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                   "or 5,5,5\n\t"                                 \
                  );                                              \
 } while (0)
607 #endif /* PLAT_ppc64be_linux */
609 #if defined(PLAT_ppc64le_linux)
611 typedef
612 struct {
613 unsigned long int nraddr; /* where's the code? */
614 unsigned long int r2; /* what tocptr do we need? */
616 OrigFn;
/* Magic preamble Valgrind's JIT recognises as introducing a client
   request.  Rotations of r0 summing to 128 (0 mod 64), so a real CPU
   leaves r0 unchanged. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
622 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
623 _zzq_default, _zzq_request, \
624 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
626 __extension__ \
627 ({ unsigned long int _zzq_args[6]; \
628 unsigned long int _zzq_result; \
629 unsigned long int* _zzq_ptr; \
630 _zzq_args[0] = (unsigned long int)(_zzq_request); \
631 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
632 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
633 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
634 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
635 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
636 _zzq_ptr = _zzq_args; \
637 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
638 "mr 4,%2\n\t" /*ptr*/ \
639 __SPECIAL_INSTRUCTION_PREAMBLE \
640 /* %R3 = client_request ( %R4 ) */ \
641 "or 1,1,1\n\t" \
642 "mr %0,3" /*result*/ \
643 : "=b" (_zzq_result) \
644 : "b" (_zzq_default), "b" (_zzq_ptr) \
645 : "cc", "memory", "r3", "r4"); \
646 _zzq_result; \
649 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
650 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
651 unsigned long int __addr; \
652 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
653 /* %R3 = guest_NRADDR */ \
654 "or 2,2,2\n\t" \
655 "mr %0,3" \
656 : "=b" (__addr) \
658 : "cc", "memory", "r3" \
659 ); \
660 _zzq_orig->nraddr = __addr; \
661 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
662 /* %R3 = guest_NRADDR_GPR2 */ \
663 "or 4,4,4\n\t" \
664 "mr %0,3" \
665 : "=b" (__addr) \
667 : "cc", "memory", "r3" \
668 ); \
669 _zzq_orig->r2 = __addr; \
/* Asm text fragment (not a complete asm statement): tells Valgrind to
   branch-and-link through R12 (ELFv2 ABI) without redirection. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R12 */       \
                     "or 3,3,3\n\t"

/* Marker requesting IR injection at this point; the sequence executes
   as a no-op on a real CPU. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                   "or 5,5,5\n\t"                                 \
                  );                                              \
 } while (0)
684 #endif /* PLAT_ppc64le_linux */
686 /* ------------------------- arm-linux ------------------------- */
688 #if defined(PLAT_arm_linux)
690 typedef
691 struct {
692 unsigned int nraddr; /* where's the code? */
694 OrigFn;
/* Magic preamble Valgrind's JIT recognises as introducing a client
   request.  Rotate amounts sum to 64 (0 mod 32), so a real CPU leaves
   r12 unchanged. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t"   \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
700 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
701 _zzq_default, _zzq_request, \
702 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
704 __extension__ \
705 ({volatile unsigned int _zzq_args[6]; \
706 volatile unsigned int _zzq_result; \
707 _zzq_args[0] = (unsigned int)(_zzq_request); \
708 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
709 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
710 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
711 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
712 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
713 __asm__ volatile("mov r3, %1\n\t" /*default*/ \
714 "mov r4, %2\n\t" /*ptr*/ \
715 __SPECIAL_INSTRUCTION_PREAMBLE \
716 /* R3 = client_request ( R4 ) */ \
717 "orr r10, r10, r10\n\t" \
718 "mov %0, r3" /*result*/ \
719 : "=r" (_zzq_result) \
720 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
721 : "cc","memory", "r3", "r4"); \
722 _zzq_result; \
725 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
726 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
727 unsigned int __addr; \
728 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
729 /* R3 = guest_NRADDR */ \
730 "orr r11, r11, r11\n\t" \
731 "mov %0, r3" \
732 : "=r" (__addr) \
734 : "cc", "memory", "r3" \
735 ); \
736 _zzq_orig->nraddr = __addr; \
/* Asm text fragment (not a complete asm statement): tells Valgrind to
   branch-and-link through R4 without applying function redirection. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

/* Marker requesting IR injection at this point; the sequence executes
   as a no-op on a real CPU. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                   "orr r9, r9, r9\n\t"                           \
                   : : : "cc", "memory"                           \
                  );                                              \
 } while (0)
752 #endif /* PLAT_arm_linux */
754 /* ------------------------ arm64-linux ------------------------- */
756 #if defined(PLAT_arm64_linux)
758 typedef
759 struct {
760 unsigned long int nraddr; /* where's the code? */
762 OrigFn;
/* Magic preamble Valgrind's JIT recognises as introducing a client
   request.  Rotate amounts sum to 128 (0 mod 64), so a real CPU leaves
   x12 unchanged. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "ror x12, x12, #3 ; ror x12, x12, #13 \n\t"           \
            "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"
768 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
769 _zzq_default, _zzq_request, \
770 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
772 __extension__ \
773 ({volatile unsigned long int _zzq_args[6]; \
774 volatile unsigned long int _zzq_result; \
775 _zzq_args[0] = (unsigned long int)(_zzq_request); \
776 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
777 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
778 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
779 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
780 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
781 __asm__ volatile("mov x3, %1\n\t" /*default*/ \
782 "mov x4, %2\n\t" /*ptr*/ \
783 __SPECIAL_INSTRUCTION_PREAMBLE \
784 /* X3 = client_request ( X4 ) */ \
785 "orr x10, x10, x10\n\t" \
786 "mov %0, x3" /*result*/ \
787 : "=r" (_zzq_result) \
788 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
789 : "cc","memory", "x3", "x4"); \
790 _zzq_result; \
793 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
794 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
795 unsigned long int __addr; \
796 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
797 /* X3 = guest_NRADDR */ \
798 "orr x11, x11, x11\n\t" \
799 "mov %0, x3" \
800 : "=r" (__addr) \
802 : "cc", "memory", "x3" \
803 ); \
804 _zzq_orig->nraddr = __addr; \
/* Asm text fragment (not a complete asm statement): tells Valgrind to
   branch-and-link through X8 without applying function redirection. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"

/* Marker requesting IR injection at this point; the sequence executes
   as a no-op on a real CPU. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                   "orr x9, x9, x9\n\t"                           \
                   : : : "cc", "memory"                           \
                  );                                              \
 } while (0)
820 #endif /* PLAT_arm64_linux */
822 /* ------------------------ s390x-linux ------------------------ */
824 #if defined(PLAT_s390x_linux)
826 typedef
827 struct {
828 unsigned long int nraddr; /* where's the code? */
830 OrigFn;
/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code.  This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 */
/* Magic preamble Valgrind's JIT recognises as introducing a client
   request.  Each "lr N,N" loads a register into itself, so the
   sequence is a no-op on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                           \
  "lr 15,15\n\t"                                                 \
  "lr 1,1\n\t"                                                   \
  "lr 2,2\n\t"                                                   \
  "lr 3,3\n\t"
/* Discriminator instructions that follow __SPECIAL_INSTRUCTION_PREAMBLE
   and select which request the JIT should service; each is an "lr N,N"
   register-to-itself no-op on a real CPU. */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"
847 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
848 _zzq_default, _zzq_request, \
849 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
850 __extension__ \
851 ({volatile unsigned long int _zzq_args[6]; \
852 volatile unsigned long int _zzq_result; \
853 _zzq_args[0] = (unsigned long int)(_zzq_request); \
854 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
855 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
856 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
857 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
858 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
859 __asm__ volatile(/* r2 = args */ \
860 "lgr 2,%1\n\t" \
861 /* r3 = default */ \
862 "lgr 3,%2\n\t" \
863 __SPECIAL_INSTRUCTION_PREAMBLE \
864 __CLIENT_REQUEST_CODE \
865 /* results = r3 */ \
866 "lgr %0, 3\n\t" \
867 : "=d" (_zzq_result) \
868 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
869 : "cc", "2", "3", "memory" \
870 ); \
871 _zzq_result; \
874 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
875 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
876 volatile unsigned long int __addr; \
877 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
878 __GET_NR_CONTEXT_CODE \
879 "lgr %0, 3\n\t" \
880 : "=a" (__addr) \
882 : "cc", "3", "memory" \
883 ); \
884 _zzq_orig->nraddr = __addr; \
/* Asm text fragment (not a complete asm statement): tells Valgrind to
   call through R1 without applying function redirection. */
#define VALGRIND_CALL_NOREDIR_R1                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     __CALL_NO_REDIR_CODE

/* Marker requesting IR injection at this point; the sequence executes
   as a no-op on a real CPU. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                   __VEX_INJECT_IR_CODE);                         \
 } while (0)
897 #endif /* PLAT_s390x_linux */
899 /* ------------------------- mips32-linux ---------------- */
901 #if defined(PLAT_mips32_linux)
903 typedef
904 struct {
905 unsigned int nraddr; /* where's the code? */
907 OrigFn;
909 /* .word 0x342
910 * .word 0x742
911 * .word 0xC2
912 * .word 0x4C2*/
/* Magic preamble Valgrind's JIT recognises as introducing a client
   request.  Shifts of the hard-wired $0 (zero) register are no-ops on
   a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE          \
                     "srl $0, $0, 13\n\t"       \
                     "srl $0, $0, 29\n\t"       \
                     "srl $0, $0, 3\n\t"        \
                     "srl $0, $0, 19\n\t"
919 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
920 _zzq_default, _zzq_request, \
921 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
922 __extension__ \
923 ({ volatile unsigned int _zzq_args[6]; \
924 volatile unsigned int _zzq_result; \
925 _zzq_args[0] = (unsigned int)(_zzq_request); \
926 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
927 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
928 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
929 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
930 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
931 __asm__ volatile("move $11, %1\n\t" /*default*/ \
932 "move $12, %2\n\t" /*ptr*/ \
933 __SPECIAL_INSTRUCTION_PREAMBLE \
934 /* T3 = client_request ( T4 ) */ \
935 "or $13, $13, $13\n\t" \
936 "move %0, $11\n\t" /*result*/ \
937 : "=r" (_zzq_result) \
938 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
939 : "$11", "$12"); \
940 _zzq_result; \
943 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
944 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
945 volatile unsigned int __addr; \
946 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
947 /* %t9 = guest_NRADDR */ \
948 "or $14, $14, $14\n\t" \
949 "move %0, $11" /*result*/ \
950 : "=r" (__addr) \
952 : "$11" \
953 ); \
954 _zzq_orig->nraddr = __addr; \
/* Asm text fragment (not a complete asm statement): tells Valgrind to
   call through $t9 without applying function redirection. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%t9 */                      \
                     "or $15, $15, $15\n\t"

/* Marker requesting IR injection at this point; the sequence executes
   as a no-op on a real CPU. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                   "or $11, $11, $11\n\t"                         \
                  );                                              \
 } while (0)
970 #endif /* PLAT_mips32_linux */
972 /* ------------------------- mips64-linux ---------------- */
974 #if defined(PLAT_mips64_linux)
976 typedef
977 struct {
978 unsigned long nraddr; /* where's the code? */
980 OrigFn;
982 /* dsll $0,$0, 3
983 * dsll $0,$0, 13
984 * dsll $0,$0, 29
985 * dsll $0,$0, 19*/
/* Magic preamble Valgrind's JIT recognises as introducing a client
   request.  Shifts of the hard-wired $0 (zero) register are no-ops on
   a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                              \
        "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"                         \
        "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
/* Issue a client request (mips64): marshal the request code and five
   args into _zzq_args[], point $12 at the array, preload $11 with the
   default result, and mark the request with "or $13, $13, $13" after
   the magic preamble.  Under Valgrind $11 comes back holding the
   request's result; natively the marker is a no-op and the default is
   returned.  NB: the extraction of this file had dropped the closing
   "})" of the statement expression; restored here.  */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
   __extension__                                                  \
   ({ volatile unsigned long int _zzq_args[6];                    \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
        __asm__ volatile("move $11, %1\n\t" /*default*/           \
                         "move $12, %2\n\t" /*ptr*/               \
                         __SPECIAL_INSTRUCTION_PREAMBLE           \
                         /* $11 = client_request ( $12 ) */       \
                         "or $13, $13, $13\n\t"                   \
                         "move %0, $11\n\t" /*result*/            \
                         : "=r" (_zzq_result)                     \
                         : "r" (_zzq_default), "r" (&_zzq_args[0])\
                         : "$11", "$12");                         \
    _zzq_result;                                                  \
  })
/* Collect the context (nraddr) of the function being wrapped
   (mips64); "or $14, $14, $14" after the preamble asks the JIT to
   place guest_NRADDR in $11.  NB: the extraction of this file had
   dropped the empty input-operand line and the closing brace;
   both are restored here.  */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $11 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11");                                    \
    _zzq_orig->nraddr = __addr;                                   \
  }
/* Emit asm text for a no-redirect call through $25/$t9 (mips64);
   "or $15, $15, $15" after the preamble is the JIT's marker.
   Expands to a string fragment for use inside an asm block.  */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir $25 */                       \
                     "or $15, $15, $15\n\t"

/* Ask the JIT to inject IR at this point (mips64); marker is
   "or $11, $11, $11" after the preamble.  */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
      __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE             \
                       "or $11, $11, $11\n\t"                     \
                      );                                          \
 } while (0)
1039 #endif /* PLAT_mips64_linux */
1041 /* ------------------------ tilegx-linux --------------- */
1042 #if defined(PLAT_tilegx_linux)
/* Holds the address of the original (pre-redirection) function for
   the tilegx function-wrapping machinery.  The extraction of this
   file had dropped the struct's closing brace; restored here.  */
typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;
1049 /*** special instruction sequence.
1050 0:02b3c7ff91234fff { moveli zero, 4660 ; moveli zero, 22136 }
1051 8:0091a7ff95678fff { moveli zero, 22136 ; moveli zero, 4660 }
1052 ****/
/* Two 8-byte bundles of no-op moveli-to-zero instructions (encodings
   given in the comment above) that the Valgrind tilegx JIT recognises
   as the magic client-request preamble.  */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
   ".quad 0x02b3c7ff91234fff\n"                                   \
   ".quad 0x0091a7ff95678fff\n"
/* Issue a client request (tilegx): marshal the request code and five
   args into _zzq_args[], point r12 at the array, preload r11 with the
   default result, and mark the request with "or r13, r13, r13" after
   the magic preamble; r11 returns the result.  NB: the extraction of
   this file had dropped the closing "})" of the statement expression;
   restored here.  */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
   ({ volatile unsigned long long int _zzq_args[6];               \
    volatile unsigned long long int _zzq_result;                  \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    __asm__ volatile("move r11, %1\n\t" /*default*/               \
                     "move r12, %2\n\t" /*ptr*/                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* r11 = client_request */                   \
                     "or r13, r13, r13\n\t"                       \
                     "move %0, r11\n\t" /*result*/                \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "memory", "r11", "r12");                   \
    _zzq_result;                                                  \
  })
/* Collect the context (nraddr) of the function being wrapped
   (tilegx); "or r14, r14, r14" after the preamble asks the JIT to
   place guest_NRADDR in r11.  NB: the extraction of this file had
   dropped the empty input-operand line and the closing brace;
   both are restored here.  */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long long int __addr;                       \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* r11 = guest_NRADDR */                     \
                     "or r14, r14, r14\n"                         \
                     "move %0, r11\n"                             \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "memory", "r11"                            \
                     );                                           \
    _zzq_orig->nraddr = __addr;                                   \
  }
/* Emit asm text for a no-redirect call through r12 (tilegx);
   "or r15, r15, r15" after the preamble is the JIT's marker.
   Expands to a string fragment for use inside an asm block.  */
#define VALGRIND_CALL_NOREDIR_R12                                 \
   __SPECIAL_INSTRUCTION_PREAMBLE                                 \
   "or r15, r15, r15\n\t"

/* Ask the JIT to inject IR at this point (tilegx); marker is
   "or r11, r11, r11" after the preamble.  */
#define VALGRIND_VEX_INJECT_IR()                                  \
   do {                                                           \
      __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE             \
                       "or r11, r11, r11\n\t"                     \
                      );                                          \
   } while (0)
1106 #endif /* PLAT_tilegx_linux */
1108 /* Insert assembly code for other platforms here... */
1110 #endif /* NVALGRIND */
1113 /* ------------------------------------------------------------------ */
1114 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1115 /* ugly. It's the least-worst tradeoff I can think of. */
1116 /* ------------------------------------------------------------------ */
1118 /* This section defines magic (a.k.a appalling-hack) macros for doing
1119 guaranteed-no-redirection macros, so as to get from function
1120 wrappers to the functions they are wrapping. The whole point is to
1121 construct standard call sequences, but to do the call itself with a
1122 special no-redirect call pseudo-instruction that the JIT
1123 understands and handles specially. This section is long and
1124 repetitious, and I can't see a way to make it shorter.
1126 The naming scheme is as follows:
1128 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1130 'W' stands for "word" and 'v' for "void". Hence there are
1131 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1132 and for each, the possibility of returning a word-typed result, or
   no result.
*/
1136 /* Use these to write the name of your wrapper. NOTE: duplicates
1137 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
1138 the default behaviour equivalance class tag "0000" into the name.
1139 See pub_tool_redir.h for details -- normally you don't need to
1140 think about this, though. */
1142 /* Use an extra level of macroisation so as to ensure the soname/fnname
1143 args are fully macro-expanded before pasting them together. */
/* Token-paste four fragments; the extra macro level ensures the
   soname/fnname arguments are fully expanded before pasting.  */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

/* Build the magic symbol name for a wrapper, ZU = unencoded soname.  */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

/* Build the magic symbol name for a wrapper, ZZ = Z-encoded soname.  */
#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1152 /* Use this macro from within a wrapper function to collect the
1153 context (address and possibly other info) of the original function.
1154 Once you have that you can then use it in one of the CALL_FN_
1155 macros. The type of the argument _lval is OrigFn. */
/* From within a wrapper, capture the original function's context
   (an OrigFn) so a CALL_FN_* macro can call onwards to it.  */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)
1158 /* Also provide end-user facilities for function replacement, rather
1159 than wrapping. A replacement function differs from a wrapper in
1160 that it has no way to get hold of the original function being
1161 called, and hence no way to call onwards to it. In a replacement
1162 function, VALGRIND_GET_ORIG_FN always returns zero. */
/* Build the magic symbol name for a replacement (not wrapper)
   function; note the _vgr prefix vs _vgw.  ZU = unencoded soname.  */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

/* Same, with Z-encoded soname.  */
#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1170 /* Derivatives of the main macros below, for calling functions
1171 returning void. */
/* CALL_FN_v_*: variants of the CALL_FN_W_* macros for calling
   functions that return void.  Each forwards to the corresponding
   word-returning macro and discards the result in _junk.  */
#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_v(_junk,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_W(_junk,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1205 /* ------------------------- x86-{linux,darwin} ---------------- */
1207 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
1209 /* These regs are trashed by the hidden call. No need to mention eax
1210 as gcc can already see that, plus causes gcc to bomb. */
/* Registers clobbered by the hidden call on x86 (eax is omitted
   because gcc already sees it via the "=a" output constraint).  */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
1213 /* Macros to save and align the stack before making a function
1214 call and restore it afterwards as gcc may not keep the stack
1215 pointer aligned if it doesn't realise calls are being made
1216 to other functions. */
/* Stash %esp in %edi (declared as clobbered by every caller below)
   and force 16-byte stack alignment for the hidden call.  */
#define VALGRIND_ALIGN_STACK                                      \
      "movl %%esp,%%edi\n\t"                                      \
      "andl $0xfffffff0,%%esp\n\t"
/* Undo VALGRIND_ALIGN_STACK (also pops all pushed args at once).  */
#define VALGRIND_RESTORE_STACK                                    \
      "movl %%edi,%%esp\n\t"
1224 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1225 long) == 4. */
/* x86 CALL_FN_W_*: call the original (unredirected) function held in
   _orig.nraddr with 0..12 word arguments, result into lval.  Common
   shape of every macro below: copy the target address and the args
   into _argvec[] (the target at index 0, args at 1..n); point %eax at
   the array; align the stack; where needed, "subl $k" pads so that
   the pushed args plus padding total a multiple of 16 bytes; push the
   args right-to-left from fixed offsets off %eax; load the target
   into %eax and perform the no-redirect call; restore the stack.
   "memory" and __CALLER_SAVED_REGS (+ "edi", used by
   VALGRIND_ALIGN_STACK) are declared clobbered.  */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11,arg12)                    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1634 #endif /* PLAT_x86_linux || PLAT_x86_darwin */
1636 /* ------------------------ amd64-{linux,darwin} --------------- */
1638 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
1640 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1642 /* These regs are trashed by the hidden call. */
/* Registers clobbered by the hidden call on amd64 (rax omitted: gcc
   already sees it via the "=a" output constraint).  */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"
1646 /* This is all pretty complex. It's so as to make stack unwinding
1647 work reliably. See bug 243270. The basic problem is the sub and
1648 add of 128 of %rsp in all of the following macros. If gcc believes
1649 the CFA is in %rsp, then unwinding may fail, because what's at the
1650 CFA is not what gcc "expected" when it constructs the CFIs for the
1651 places where the macros are instantiated.
1653 But we can't just add a CFI annotation to increase the CFA offset
1654 by 128, to match the sub of 128 from %rsp, because we don't know
1655 whether gcc has chosen %rsp as the CFA at that point, or whether it
1656 has chosen some other register (eg, %rbp). In the latter case,
1657 adding a CFI annotation to change the CFA offset is simply wrong.
1659 So the solution is to get hold of the CFA using
1660 __builtin_dwarf_cfa(), put it in a known register, and add a
1661 CFI annotation to say what the register is. We choose %rbp for
1662 this (perhaps perversely), because:
1664 (1) %rbp is already subject to unwinding. If a new register was
1665 chosen then the unwinder would have to unwind it in all stack
1666 traces, which is expensive, and
1668 (2) %rbp is already subject to precise exception updates in the
1669 JIT. If a new register was chosen, we'd have to have precise
1670 exceptions for it too, which reduces performance of the
1671 generated code.
1673 However .. one extra complication. We can't just whack the result
1674 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1675 list of trashed registers at the end of the inline assembly
1676 fragments; gcc won't allow %rbp to appear in that list. Hence
1677 instead we need to stash %rbp in %r15 for the duration of the asm,
1678 and say that %r15 is trashed instead. gcc seems happy to go with
1679 that.
1681 Oh .. and this all needs to be conditionalised so that it is
1682 unchanged from before this commit, when compiled with older gccs
1683 that don't support __builtin_dwarf_cfa. Furthermore, since
1684 this header file is freestanding, it has to be independent of
1685 config.h, and so the following conditionalisation cannot depend on
1686 configure time checks.
1688 Although it's not clear from
1689 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1690 this expression excludes Darwin.
1691 .cfi directives in Darwin assembly appear to be completely
1692 different and I haven't investigated how they work.
1694 For even more entertainment value, note we have to use the
1695 completely undocumented __builtin_dwarf_cfa(), which appears to
1696 really compute the CFA, whereas __builtin_frame_address(0) claims
1697 to but actually doesn't. See
   https://bugs.kde.org/show_bug.cgi?id=243270#c47
*/
/* CFI support (see the long comment above): when the compiler emits
   DWARF CFI asm, pass the CFA as an extra asm input (__FRAME_POINTER),
   stash %rbp in %r15, load the CFA into %rbp, and tell the unwinder
   via .cfi_def_cfa that %rbp is the CFA for the duration of the asm;
   the epilogue restores both.  Otherwise all three expand to nothing. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
1717 /* Macros to save and align the stack before making a function
1718 call and restore it afterwards as gcc may not keep the stack
1719 pointer aligned if it doesn't realise calls are being made
1720 to other functions. */
/* Stash %rsp in %r14 (declared clobbered by every caller below) and
   force 16-byte stack alignment for the hidden call.  */
#define VALGRIND_ALIGN_STACK                                      \
      "movq %%rsp,%%r14\n\t"                                      \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
/* Undo VALGRIND_ALIGN_STACK (also undoes the 128-byte redzone sub). */
#define VALGRIND_RESTORE_STACK                                    \
      "movq %%r14,%%rsp\n\t"
1728 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1729 long) == 8. */
1731 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1732 macros. In order not to trash the stack redzone, we need to drop
1733 %rsp by 128 before the hidden call, and restore afterwards. The
1734 nastyness is that it is only by luck that the stack still appears
1735 to be unwindable during the hidden call - since then the behaviour
1736 of any routine using this macro does not match what the CFI data
1737 says. Sigh.
1739 Why is this important? Imagine that a wrapper has a stack
1740 allocated local, and passes to the hidden call, a pointer to it.
1741 Because gcc does not know about the hidden call, it may allocate
1742 that local in the redzone. Unfortunately the hidden call may then
1743 trash it before it comes to use it. So we must step clear of the
1744 redzone, for the duration of the hidden call, to make it safe.
1746 Probably the same problem afflicts the other redzone-style ABIs too
1747 (ppc64-linux); but for those, the stack is
1748 self describing (none of this CFI nonsense) so at least messing
1749 with the stack pointer doesn't give a danger of non-unwindable
1750 stack. */
1752 #define CALL_FN_W_v(lval, orig) \
1753 do { \
1754 volatile OrigFn _orig = (orig); \
1755 volatile unsigned long _argvec[1]; \
1756 volatile unsigned long _res; \
1757 _argvec[0] = (unsigned long)_orig.nraddr; \
1758 __asm__ volatile( \
1759 VALGRIND_CFI_PROLOGUE \
1760 VALGRIND_ALIGN_STACK \
1761 "subq $128,%%rsp\n\t" \
1762 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1763 VALGRIND_CALL_NOREDIR_RAX \
1764 VALGRIND_RESTORE_STACK \
1765 VALGRIND_CFI_EPILOGUE \
1766 : /*out*/ "=a" (_res) \
1767 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1768 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1769 ); \
1770 lval = (__typeof__(lval)) _res; \
1771 } while (0)
1773 #define CALL_FN_W_W(lval, orig, arg1) \
1774 do { \
1775 volatile OrigFn _orig = (orig); \
1776 volatile unsigned long _argvec[2]; \
1777 volatile unsigned long _res; \
1778 _argvec[0] = (unsigned long)_orig.nraddr; \
1779 _argvec[1] = (unsigned long)(arg1); \
1780 __asm__ volatile( \
1781 VALGRIND_CFI_PROLOGUE \
1782 VALGRIND_ALIGN_STACK \
1783 "subq $128,%%rsp\n\t" \
1784 "movq 8(%%rax), %%rdi\n\t" \
1785 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1786 VALGRIND_CALL_NOREDIR_RAX \
1787 VALGRIND_RESTORE_STACK \
1788 VALGRIND_CFI_EPILOGUE \
1789 : /*out*/ "=a" (_res) \
1790 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1791 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1792 ); \
1793 lval = (__typeof__(lval)) _res; \
1794 } while (0)
1796 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1797 do { \
1798 volatile OrigFn _orig = (orig); \
1799 volatile unsigned long _argvec[3]; \
1800 volatile unsigned long _res; \
1801 _argvec[0] = (unsigned long)_orig.nraddr; \
1802 _argvec[1] = (unsigned long)(arg1); \
1803 _argvec[2] = (unsigned long)(arg2); \
1804 __asm__ volatile( \
1805 VALGRIND_CFI_PROLOGUE \
1806 VALGRIND_ALIGN_STACK \
1807 "subq $128,%%rsp\n\t" \
1808 "movq 16(%%rax), %%rsi\n\t" \
1809 "movq 8(%%rax), %%rdi\n\t" \
1810 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1811 VALGRIND_CALL_NOREDIR_RAX \
1812 VALGRIND_RESTORE_STACK \
1813 VALGRIND_CFI_EPILOGUE \
1814 : /*out*/ "=a" (_res) \
1815 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1816 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1817 ); \
1818 lval = (__typeof__(lval)) _res; \
1819 } while (0)
1821 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1822 do { \
1823 volatile OrigFn _orig = (orig); \
1824 volatile unsigned long _argvec[4]; \
1825 volatile unsigned long _res; \
1826 _argvec[0] = (unsigned long)_orig.nraddr; \
1827 _argvec[1] = (unsigned long)(arg1); \
1828 _argvec[2] = (unsigned long)(arg2); \
1829 _argvec[3] = (unsigned long)(arg3); \
1830 __asm__ volatile( \
1831 VALGRIND_CFI_PROLOGUE \
1832 VALGRIND_ALIGN_STACK \
1833 "subq $128,%%rsp\n\t" \
1834 "movq 24(%%rax), %%rdx\n\t" \
1835 "movq 16(%%rax), %%rsi\n\t" \
1836 "movq 8(%%rax), %%rdi\n\t" \
1837 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1838 VALGRIND_CALL_NOREDIR_RAX \
1839 VALGRIND_RESTORE_STACK \
1840 VALGRIND_CFI_EPILOGUE \
1841 : /*out*/ "=a" (_res) \
1842 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1843 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1844 ); \
1845 lval = (__typeof__(lval)) _res; \
1846 } while (0)
1848 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1849 do { \
1850 volatile OrigFn _orig = (orig); \
1851 volatile unsigned long _argvec[5]; \
1852 volatile unsigned long _res; \
1853 _argvec[0] = (unsigned long)_orig.nraddr; \
1854 _argvec[1] = (unsigned long)(arg1); \
1855 _argvec[2] = (unsigned long)(arg2); \
1856 _argvec[3] = (unsigned long)(arg3); \
1857 _argvec[4] = (unsigned long)(arg4); \
1858 __asm__ volatile( \
1859 VALGRIND_CFI_PROLOGUE \
1860 VALGRIND_ALIGN_STACK \
1861 "subq $128,%%rsp\n\t" \
1862 "movq 32(%%rax), %%rcx\n\t" \
1863 "movq 24(%%rax), %%rdx\n\t" \
1864 "movq 16(%%rax), %%rsi\n\t" \
1865 "movq 8(%%rax), %%rdi\n\t" \
1866 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1867 VALGRIND_CALL_NOREDIR_RAX \
1868 VALGRIND_RESTORE_STACK \
1869 VALGRIND_CFI_EPILOGUE \
1870 : /*out*/ "=a" (_res) \
1871 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1872 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1873 ); \
1874 lval = (__typeof__(lval)) _res; \
1875 } while (0)
/* Call a 5-arg function: args in rdi/rsi/rdx/rcx/r8 (SysV AMD64 ABI),
   red zone skipped as in the other CALL_FN_ macros. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"  /* skip red zone */               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 6-arg function: fills all six SysV AMD64 integer arg
   registers (rdi,rsi,rdx,rcx,r8,r9); no stack args needed. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"  /* skip red zone */               \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 7-arg function: first six args in registers, arg7 pushed on
   the stack.  "subq $136" (not 128) is deliberate: 136 + one 8-byte
   push = 144, keeping %rsp 16-byte aligned at the call point. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"  /* red zone + alignment pad */    \
         "pushq 56(%%rax)\n\t"  /* arg7 */                        \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 8-arg function: six register args plus arg7/arg8 pushed in
   reverse order (arg8 first, so arg7 ends up lower).  128 + two
   8-byte pushes = 144, preserving 16-byte stack alignment. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"  /* skip red zone */               \
         "pushq 64(%%rax)\n\t"  /* arg8 */                        \
         "pushq 56(%%rax)\n\t"  /* arg7 */                        \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 9-arg function: six register args plus arg7..arg9 pushed in
   reverse order.  136 + three 8-byte pushes = 160, preserving 16-byte
   stack alignment. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"  /* red zone + alignment pad */    \
         "pushq 72(%%rax)\n\t"  /* arg9 */                        \
         "pushq 64(%%rax)\n\t"  /* arg8 */                        \
         "pushq 56(%%rax)\n\t"  /* arg7 */                        \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 10-arg function: six register args plus arg7..arg10 pushed
   in reverse order.  128 + four 8-byte pushes = 160, preserving
   16-byte stack alignment. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"  /* skip red zone */               \
         "pushq 80(%%rax)\n\t"  /* arg10 */                       \
         "pushq 72(%%rax)\n\t"  /* arg9 */                        \
         "pushq 64(%%rax)\n\t"  /* arg8 */                        \
         "pushq 56(%%rax)\n\t"  /* arg7 */                        \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 11-arg function: six register args plus arg7..arg11 pushed
   in reverse order.  136 + five 8-byte pushes = 176, preserving
   16-byte stack alignment. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"  /* red zone + alignment pad */    \
         "pushq 88(%%rax)\n\t"  /* arg11 */                       \
         "pushq 80(%%rax)\n\t"  /* arg10 */                       \
         "pushq 72(%%rax)\n\t"  /* arg9 */                        \
         "pushq 64(%%rax)\n\t"  /* arg8 */                        \
         "pushq 56(%%rax)\n\t"  /* arg7 */                        \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 12-arg function: six register args plus arg7..arg12 pushed
   in reverse order.  128 + six 8-byte pushes = 176, preserving
   16-byte stack alignment. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"  /* skip red zone */               \
         "pushq 96(%%rax)\n\t"  /* arg12 */                       \
         "pushq 88(%%rax)\n\t"  /* arg11 */                       \
         "pushq 80(%%rax)\n\t"  /* arg10 */                       \
         "pushq 72(%%rax)\n\t"  /* arg9 */                        \
         "pushq 64(%%rax)\n\t"  /* arg8 */                        \
         "pushq 56(%%rax)\n\t"  /* arg7 */                        \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2187 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
2189 /* ------------------------ ppc32-linux ------------------------ */
2191 #if defined(PLAT_ppc32_linux)
2193 /* This is useful for finding out about the on-stack stuff:
2195 extern int f9 ( int,int,int,int,int,int,int,int,int );
2196 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2197 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2198 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2200 int g9 ( void ) {
2201 return f9(11,22,33,44,55,66,77,88,99);
2203 int g10 ( void ) {
2204 return f10(11,22,33,44,55,66,77,88,99,110);
2206 int g11 ( void ) {
2207 return f11(11,22,33,44,55,66,77,88,99,110,121);
2209 int g12 ( void ) {
2210 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2214 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2216 /* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
   "r11", "r12", "r13"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save r1 (the stack pointer) into r28 — r28 is therefore listed as
   a clobber in every CALL_FN_ below — then clear the low 4 bits of
   r1 (rlwinm keeps bits 0..27) to force 16-byte alignment. */
#define VALGRIND_ALIGN_STACK                                      \
   "mr 28,1\n\t"                                                  \
   "rlwinm 1,1,0,0,27\n\t"
/* Restore the original stack pointer saved in r28. */
#define VALGRIND_RESTORE_STACK                                    \
   "mr 1,28\n\t"
2234 /* These CALL_FN_ macros assume that on ppc32-linux,
2235 sizeof(unsigned long) == 4. */
/* Call a 0-arg function via the non-redirected address.  The target
   address is fetched through r11; the result is returned in r3 per
   the ppc32 ELF ABI. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"           /* result from r3 */                 \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 1-arg function: arg1 in r3 (ppc32 ELF ABI). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"           /* result from r3 */                 \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 2-arg function: args in r3,r4 (ppc32 ELF ABI). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"   /* arg2->r4 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"           /* result from r3 */                 \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 3-arg function: args in r3,r4,r5 (ppc32 ELF ABI). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"           /* result from r3 */                 \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 4-arg function: args in r3..r6 (ppc32 ELF ABI). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"           /* result from r3 */                 \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 5-arg function: args in r3..r7 (ppc32 ELF ABI). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"           /* result from r3 */                 \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 6-arg function: args in r3..r8 (ppc32 ELF ABI). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"           /* result from r3 */                 \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 7-arg function: args in r3..r9 (ppc32 ELF ABI). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"           /* result from r3 */                 \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 8-arg function: args fill all eight arg registers r3..r10
   (ppc32 ELF ABI); no stack args needed. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"           /* result from r3 */                 \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 9-arg function: r3..r10 plus arg9 stored in the caller's
   parameter area at 8(r1) after reserving 16 bytes of stack. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"  /* expand stack frame */             \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"           /* result from r3 */                 \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 10-arg function: r3..r10 plus arg9/arg10 stored at 8(r1)
   and 12(r1) after reserving 16 bytes of stack. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"  /* expand stack frame */             \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"           /* result from r3 */                 \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 11-arg function: r3..r10 plus arg9..arg11 stored at
   8/12/16(r1) after reserving 32 bytes of stack (keeps 16-byte
   alignment). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"  /* expand stack frame */             \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"           /* result from r3 */                 \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 12-arg function: r3..r10 plus arg9..arg12 stored at
   8/12/16/20(r1) after reserving 32 bytes of stack. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      _argvec[12] = (unsigned long)arg12;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"  /* expand stack frame */             \
         /* arg12 */                                              \
         "lwz 3,48(11)\n\t"                                       \
         "stw 3,20(1)\n\t"                                        \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"           /* result from r3 */                 \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2687 #endif /* PLAT_ppc32_linux */
2689 /* ------------------------ ppc64-linux ------------------------ */
2691 #if defined(PLAT_ppc64be_linux)
2693 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2695 /* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
   "r11", "r12", "r13"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save r1 (the stack pointer) into r28 — r28 is therefore listed as
   a clobber in every CALL_FN_ below — then clear the low 4 bits of
   r1 (rldicr keeps bits 0..59) to force 16-byte alignment. */
#define VALGRIND_ALIGN_STACK                                      \
   "mr 28,1\n\t"                                                  \
   "rldicr 1,1,0,59\n\t"
/* Restore the original stack pointer saved in r28. */
#define VALGRIND_RESTORE_STACK                                    \
   "mr 1,28\n\t"
2713 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2714 long) == 8. */
/* Call a 0-arg function (ppc64 big-endian ELF ABI).  The caller's
   TOC pointer (r2) is saved below the argvec, swapped for the
   callee's TOC pointer (_orig.r2) around the call, then restored. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"        /* result from r3 */                \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 1-arg function (ppc64be): arg1 in r3, TOC pointer swapped
   around the call as in CALL_FN_W_v. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"        /* result from r3 */                \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 2-arg function (ppc64be): args in r3,r4, TOC pointer
   swapped around the call. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"        /* result from r3 */                \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 3-arg function (ppc64be): args in r3..r5, TOC pointer
   swapped around the call. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"        /* result from r3 */                \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 4-argument, word-returning function on ppc64be-linux.
   Args 1-4 go to r3-r6; TOC switch as in the other CALL_FN_W_*
   macros of this section. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+4]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 5-argument, word-returning function on ppc64be-linux.
   Args 1-5 go to r3-r7; TOC switch as in the other CALL_FN_W_*
   macros of this section. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+5]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 7, 40(11)\n\t" /* arg5->r7 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 6-argument, word-returning function on ppc64be-linux.
   Args 1-6 go to r3-r8; TOC switch as in the other CALL_FN_W_*
   macros of this section. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+6]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 7, 40(11)\n\t" /* arg5->r7 */ \
         "ld 8, 48(11)\n\t" /* arg6->r8 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 7-argument, word-returning function on ppc64be-linux.
   Args 1-7 go to r3-r9; TOC switch as in the other CALL_FN_W_*
   macros of this section. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+7]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 7, 40(11)\n\t" /* arg5->r7 */ \
         "ld 8, 48(11)\n\t" /* arg6->r8 */ \
         "ld 9, 56(11)\n\t" /* arg7->r9 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an 8-argument, word-returning function on ppc64be-linux.
   Args 1-8 fill the full register-argument set r3-r10; TOC switch
   as in the other CALL_FN_W_* macros of this section. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+8]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 7, 40(11)\n\t" /* arg5->r7 */ \
         "ld 8, 48(11)\n\t" /* arg6->r8 */ \
         "ld 9, 56(11)\n\t" /* arg7->r9 */ \
         "ld 10, 64(11)\n\t" /* arg8->r10 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 9-argument, word-returning function on ppc64be-linux.
   Args 1-8 go to r3-r10 as usual; arg9 no longer fits in registers,
   so the asm grows the (already aligned) stack frame by 128 bytes
   and stores arg9 at 112(r1) before loading the register args. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+9]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-128\n\t" /* expand stack frame */ \
         /* arg9 */ \
         "ld 3,72(11)\n\t" \
         "std 3,112(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 7, 40(11)\n\t" /* arg5->r7 */ \
         "ld 8, 48(11)\n\t" /* arg6->r8 */ \
         "ld 9, 56(11)\n\t" /* arg7->r9 */ \
         "ld 10, 64(11)\n\t" /* arg8->r10 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 10-argument, word-returning function on ppc64be-linux.
   Args 1-8 go to r3-r10; the frame is grown by 128 bytes and
   arg9/arg10 are stored at 112(r1)/120(r1) (stored high-to-low so
   r3 can be reused as a scratch register before it takes arg1). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+10]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-128\n\t" /* expand stack frame */ \
         /* arg10 */ \
         "ld 3,80(11)\n\t" \
         "std 3,120(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(11)\n\t" \
         "std 3,112(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 7, 40(11)\n\t" /* arg5->r7 */ \
         "ld 8, 48(11)\n\t" /* arg6->r8 */ \
         "ld 9, 56(11)\n\t" /* arg7->r9 */ \
         "ld 10, 64(11)\n\t" /* arg8->r10 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an 11-argument, word-returning function on ppc64be-linux.
   Args 1-8 go to r3-r10; the frame is grown by 144 bytes and
   arg9/arg10/arg11 are stored at 112/120/128(r1). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+11]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      _argvec[2+11] = (unsigned long)arg11; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-144\n\t" /* expand stack frame */ \
         /* arg11 */ \
         "ld 3,88(11)\n\t" \
         "std 3,128(1)\n\t" \
         /* arg10 */ \
         "ld 3,80(11)\n\t" \
         "std 3,120(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(11)\n\t" \
         "std 3,112(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 7, 40(11)\n\t" /* arg5->r7 */ \
         "ld 8, 48(11)\n\t" /* arg6->r8 */ \
         "ld 9, 56(11)\n\t" /* arg7->r9 */ \
         "ld 10, 64(11)\n\t" /* arg8->r10 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 12-argument, word-returning function on ppc64be-linux.
   Args 1-8 go to r3-r10; the frame is grown by 144 bytes and
   arg9..arg12 are stored at 112/120/128/136(r1). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+12]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      _argvec[2+11] = (unsigned long)arg11; \
      _argvec[2+12] = (unsigned long)arg12; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-144\n\t" /* expand stack frame */ \
         /* arg12 */ \
         "ld 3,96(11)\n\t" \
         "std 3,136(1)\n\t" \
         /* arg11 */ \
         "ld 3,88(11)\n\t" \
         "std 3,128(1)\n\t" \
         /* arg10 */ \
         "ld 3,80(11)\n\t" \
         "std 3,120(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(11)\n\t" \
         "std 3,112(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 7, 40(11)\n\t" /* arg5->r7 */ \
         "ld 8, 48(11)\n\t" /* arg6->r8 */ \
         "ld 9, 56(11)\n\t" /* arg7->r9 */ \
         "ld 10, 64(11)\n\t" /* arg8->r10 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3244 #endif /* PLAT_ppc64be_linux */
3246 /* ------------------------- ppc64le-linux ----------------------- */
3247 #if defined(PLAT_ppc64le_linux)
3249 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3251 /* These regs are trashed by the hidden call. */
/* Registers the ppc64le CALL_FN_* asm blocks declare as clobbered:
   lr/ctr/xer, every CR field, and r0, r2-r13 — so gcc keeps nothing
   live in them across the hidden call. */
#define __CALLER_SAVED_REGS \
   "lr", "ctr", "xer", \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
   "r11", "r12", "r13"
3258 /* Macros to save and align the stack before making a function
3259 call and restore it afterwards as gcc may not keep the stack
3260 pointer aligned if it doesn't realise calls are being made
3261 to other functions. */
/* Save the stack pointer in r28, then round r1 down to a 16-byte
   boundary (rldicr clears the low 4 bits).  RESTORE_STACK puts the
   saved value back; r28 is listed in the CALL_FN_* clobbers. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" \
   "rldicr 1,1,0,59\n\t"
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
3269 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
3270 long) == 8. */
/* Call a 0-argument, word-returning function on ppc64le-linux.
   r12 (not r11 as on BE) holds &_argvec[2]: the caller's TOC (r2)
   is parked in _argvec[0] via -16(r12), the callee's TOC (_orig.r2)
   installed from -8(r12), and the target entry taken from 0(r12).
   r28 is clobbered by VALGRIND_ALIGN_STACK. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+0]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 1-argument, word-returning function on ppc64le-linux.
   arg1 goes to r3; TOC switch via r12 as in CALL_FN_W_v. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+1]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 2-argument, word-returning function on ppc64le-linux.
   Args 1-2 go to r3/r4; TOC switch via r12 as in CALL_FN_W_v. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+2]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 3-argument, word-returning function on ppc64le-linux.
   Args 1-3 go to r3-r5; TOC switch via r12 as in CALL_FN_W_v. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+3]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 4-argument, word-returning function on ppc64le-linux.
   Args 1-4 go to r3-r6; TOC switch via r12 as in CALL_FN_W_v. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+4]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 5-argument, word-returning function on ppc64le-linux.
   Args 1-5 go to r3-r7; TOC switch via r12 as in CALL_FN_W_v. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+5]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 6-argument, word-returning function on ppc64le-linux.
   Args 1-6 go to r3-r8; TOC switch via r12 as in CALL_FN_W_v. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+6]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 7-argument, word-returning function on ppc64le-linux.
   Args 1-7 go to r3-r9; TOC switch via r12 as in CALL_FN_W_v. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+7]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 9, 56(12)\n\t" /* arg7->r9 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an 8-argument, word-returning function on ppc64le-linux.
   Args 1-8 fill the full register-argument set r3-r10; TOC switch
   via r12 as in CALL_FN_W_v. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+8]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 9, 56(12)\n\t" /* arg7->r9 */ \
         "ld 10, 64(12)\n\t" /* arg8->r10 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 9-argument, word-returning function on ppc64le-linux.
   Args 1-8 go to r3-r10; the (already aligned) frame is grown by
   128 bytes and arg9 stored at 96(r1) — note the stack-arg offsets
   here differ from the BE variant (96 vs 112). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+9]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-128\n\t" /* expand stack frame */ \
         /* arg9 */ \
         "ld 3,72(12)\n\t" \
         "std 3,96(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 9, 56(12)\n\t" /* arg7->r9 */ \
         "ld 10, 64(12)\n\t" /* arg8->r10 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 10-argument, word-returning function on ppc64le-linux.
   Args 1-8 go to r3-r10; the frame is grown by 128 bytes and
   arg9/arg10 stored at 96(r1)/104(r1) (high-to-low, so r3 can be
   reused as scratch before it takes arg1). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+10]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-128\n\t" /* expand stack frame */ \
         /* arg10 */ \
         "ld 3,80(12)\n\t" \
         "std 3,104(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(12)\n\t" \
         "std 3,96(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 9, 56(12)\n\t" /* arg7->r9 */ \
         "ld 10, 64(12)\n\t" /* arg8->r10 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an 11-argument, word-returning function on ppc64le-linux.
   Args 1-8 go to r3-r10; the frame is grown by 144 bytes and
   arg9/arg10/arg11 stored at 96/104/112(r1). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+11]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      _argvec[2+11] = (unsigned long)arg11; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-144\n\t" /* expand stack frame */ \
         /* arg11 */ \
         "ld 3,88(12)\n\t" \
         "std 3,112(1)\n\t" \
         /* arg10 */ \
         "ld 3,80(12)\n\t" \
         "std 3,104(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(12)\n\t" \
         "std 3,96(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 9, 56(12)\n\t" /* arg7->r9 */ \
         "ld 10, 64(12)\n\t" /* arg8->r10 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* ppc64le: call a 12-arg function at its non-redirected address.
   Same scheme as CALL_FN_W_11W; args9-12 go in the expanded
   stack frame at offsets 96..120. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"       /* r12 = &_argvec[2] */             \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg12 */                                              \
         "ld 3,96(12)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld 3,88(12)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"       /* reload r12 to find saved TOC */  \
         "mr %0,3\n\t"        /* result comes back in r3 */       \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3800 #endif /* PLAT_ppc64le_linux */
3802 /* ------------------------- arm-linux ------------------------- */
3804 #if defined(PLAT_arm_linux)
/* These regs are trashed by the hidden call: the AAPCS argument /
   scratch registers r0-r3, r12 (ip) and r14 (lr), plus r4 which the
   CALL_FN_ macros below use to hold the target address. */
#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3809 /* Macros to save and align the stack before making a function
3810 call and restore it afterwards as gcc may not keep the stack
3811 pointer aligned if it doesn't realise calls are being made
3812 to other functions. */
3814 /* This is a bit tricky. We store the original stack pointer in r10
3815 as it is callee-saves. gcc doesn't allow the use of r11 for some
3816 reason. Also, we can't directly "bic" the stack pointer in thumb
3817 mode since r13 isn't an allowed register number in that context.
3818 So use r4 as a temporary, since that is about to get trashed
3819 anyway, just after each use of this macro. Side effect is we need
3820 to be very careful about any future changes, since
3821 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
#define VALGRIND_ALIGN_STACK \
      "mov r10, sp\n\t" /* save original sp in callee-saved r10 */ \
      "mov r4, sp\n\t" /* r4 as scratch (r13/sp not usable in thumb "bic") */ \
      "bic r4, r4, #7\n\t" /* round sp down to an 8-byte boundary */ \
      "mov sp, r4\n\t"
/* Undo VALGRIND_ALIGN_STACK (and any pushes since): sp was saved in r10. */
#define VALGRIND_RESTORE_STACK \
      "mov sp, r10\n\t"
3830 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3831 long) == 4. */
/* arm: call a 0-arg function at its non-redirected address.
   _argvec[0] = target; result comes back in r0. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 1-arg function.  arg1 goes in r0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      /* [0] = target, [1] = arg1 */                              \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 2-arg function.  args1-2 go in r0-r1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 3-arg function.  args1-3 go in r0-r2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 4-arg function.  args1-4 fill the register
   arguments r0-r3; nothing goes on the stack. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 5-arg function.  args1-4 in r0-r3; arg5 is pushed,
   with a 4-byte pad first so sp stays 8-byte aligned at the call. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"  /* pad: one stack arg below */    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "push {r0} \n\t"     /* arg5 */                          \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 6-arg function.  args1-4 in r0-r3; args5-6 pushed
   (8 bytes, so sp stays 8-byte aligned — no pad needed). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "push {r0, r1} \n\t"  /* args5-6 */                      \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 7-arg function.  args1-4 in r0-r3; args5-7 pushed
   after a 4-byte pad so the 16 bytes keep sp 8-byte aligned. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"  /* pad: three stack args below */ \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "push {r0, r1, r2} \n\t"  /* args5-7 */                  \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call an 8-arg function.  args1-4 in r0-r3; args5-8 pushed
   (16 bytes, sp stays 8-byte aligned — no pad needed). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "push {r0, r1, r2, r3} \n\t"  /* args5-8 */              \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 9-arg function.  args1-4 in r0-r3; args5-9 pushed
   after a 4-byte pad (5 words + pad = 24 bytes, 8-byte aligned). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"  /* pad: five stack args below */  \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"  /* args5-9 */          \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 10-arg function.  args1-4 in r0-r3; arg10 is pushed
   first (highest address), then args5-9 — 6 words total, so sp
   stays 8-byte aligned with no pad. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "push {r0} \n\t"     /* arg10 */                         \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"  /* args5-9 */          \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call an 11-arg function.  args1-4 in r0-r3; pad + args10-11
   pushed first, then args5-9 (4 + 8 + 20 = 32 bytes, 8-aligned). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"  /* pad for odd stack-arg count */ \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "push {r0, r1} \n\t"  /* args10-11 */                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"  /* args5-9 */          \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 12-arg function.  args1-4 in r0-r3; args10-12 pushed
   first, then args5-9 (12 + 20 = 32 bytes, 8-aligned, no pad). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "ldr r2, [%1, #48] \n\t"                                 \
         "push {r0, r1, r2} \n\t"  /* args10-12 */                \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"  /* args5-9 */          \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4259 #endif /* PLAT_arm_linux */
4261 /* ------------------------ arm64-linux ------------------------ */
4263 #if defined(PLAT_arm64_linux)
/* These regs are trashed by the hidden call: the integer argument /
   scratch registers x0-x18, x30 (lr), plus x19/x20 and the full
   vector register file v0-v31.  (x8 carries the target address.) */
#define __CALLER_SAVED_REGS \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9",   \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",      \
     "x18", "x19", "x20", "x30",                                  \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",  \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17",      \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25",      \
     "v26", "v27", "v28", "v29", "v30", "v31"
4275 /* x21 is callee-saved, so we can use it to save and restore SP around
4276 the hidden call. */
#define VALGRIND_ALIGN_STACK \
      "mov x21, sp\n\t" /* save original sp in callee-saved x21 */ \
      "bic sp, x21, #15\n\t" /* round sp down to a 16-byte boundary */
/* Undo VALGRIND_ALIGN_STACK (and any sp adjustment since): x21 has sp. */
#define VALGRIND_RESTORE_STACK \
      "mov sp,  x21\n\t"
4283 /* These CALL_FN_ macros assume that on arm64-linux,
4284 sizeof(unsigned long) == 8. */
/* arm64: call a 0-arg function at its non-redirected address.
   Target address travels in x8 (Valgrind's NOREDIR convention);
   result comes back in x0. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 1-arg function.  arg1 goes in x0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      /* [0] = target, [1] = arg1 */                              \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 2-arg function.  args1-2 go in x0-x1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 3-arg function.  args1-3 go in x0-x2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 4-arg function.  args1-4 go in x0-x3. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 5-arg function.  args1-5 go in x0-x4. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 6-arg function.  args1-6 go in x0-x5. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 7-arg function.  args1-7 go in x0-x6. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call an 8-arg function.  args1-8 fill the register
   arguments x0-x7; nothing goes on the stack. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 9-arg function.  args1-8 in x0-x7; arg9 goes on
   the stack.  32 bytes are reserved (more than needed) so sp
   keeps its 16-byte alignment at the call. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"  /* room for stack args */      \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"  /* arg9 */                     \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 10-arg function.  args1-8 in x0-x7; args9-10 go
   on the stack at [sp,#0] and [sp,#8] (32 bytes reserved to keep
   the 16-byte sp alignment). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"  /* room for stack args */      \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"  /* arg9 */                     \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"  /* arg10 */                    \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call an 11-arg function.  args1-8 in x0-x7; args9-11 go
   on the stack at [sp,#0..#16] (48 bytes reserved to keep the
   16-byte sp alignment). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t"  /* room for stack args */      \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"  /* arg9 */                     \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"  /* arg10 */                    \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1, #88] \n\t"  /* arg11 */                    \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4660 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4661 arg7,arg8,arg9,arg10,arg11, \
4662 arg12) \
4663 do { \
4664 volatile OrigFn _orig = (orig); \
4665 volatile unsigned long _argvec[13]; \
4666 volatile unsigned long _res; \
4667 _argvec[0] = (unsigned long)_orig.nraddr; \
4668 _argvec[1] = (unsigned long)(arg1); \
4669 _argvec[2] = (unsigned long)(arg2); \
4670 _argvec[3] = (unsigned long)(arg3); \
4671 _argvec[4] = (unsigned long)(arg4); \
4672 _argvec[5] = (unsigned long)(arg5); \
4673 _argvec[6] = (unsigned long)(arg6); \
4674 _argvec[7] = (unsigned long)(arg7); \
4675 _argvec[8] = (unsigned long)(arg8); \
4676 _argvec[9] = (unsigned long)(arg9); \
4677 _argvec[10] = (unsigned long)(arg10); \
4678 _argvec[11] = (unsigned long)(arg11); \
4679 _argvec[12] = (unsigned long)(arg12); \
4680 __asm__ volatile( \
4681 VALGRIND_ALIGN_STACK \
4682 "sub sp, sp, #0x30 \n\t" \
4683 "ldr x0, [%1, #8] \n\t" \
4684 "ldr x1, [%1, #16] \n\t" \
4685 "ldr x2, [%1, #24] \n\t" \
4686 "ldr x3, [%1, #32] \n\t" \
4687 "ldr x4, [%1, #40] \n\t" \
4688 "ldr x5, [%1, #48] \n\t" \
4689 "ldr x6, [%1, #56] \n\t" \
4690 "ldr x7, [%1, #64] \n\t" \
4691 "ldr x8, [%1, #72] \n\t" \
4692 "str x8, [sp, #0] \n\t" \
4693 "ldr x8, [%1, #80] \n\t" \
4694 "str x8, [sp, #8] \n\t" \
4695 "ldr x8, [%1, #88] \n\t" \
4696 "str x8, [sp, #16] \n\t" \
4697 "ldr x8, [%1, #96] \n\t" \
4698 "str x8, [sp, #24] \n\t" \
4699 "ldr x8, [%1] \n\t" /* target->x8 */ \
4700 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4701 VALGRIND_RESTORE_STACK \
4702 "mov %0, x0" \
4703 : /*out*/ "=r" (_res) \
4704 : /*in*/ "0" (&_argvec[0]) \
4705 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4706 ); \
4707 lval = (__typeof__(lval)) _res; \
4708 } while (0)
4710 #endif /* PLAT_arm64_linux */
4712 /* ------------------------- s390x-linux ------------------------- */
4714 #if defined(PLAT_s390x_linux)
4716 /* Similar workaround as amd64 (see above), but we use r11 as frame
4717 pointer and save the old r11 in r7. r11 might be used for
4718 argvec, therefore we copy argvec in r1 since r1 is clobbered
4719 after the call anyway. */
4720 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
4721 # define __FRAME_POINTER \
4722 ,"d"(__builtin_dwarf_cfa())
4723 # define VALGRIND_CFI_PROLOGUE \
4724 ".cfi_remember_state\n\t" \
4725 "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
4726 "lgr 7,11\n\t" \
4727 "lgr 11,%2\n\t" \
4728 ".cfi_def_cfa r11, 0\n\t"
4729 # define VALGRIND_CFI_EPILOGUE \
4730 "lgr 11, 7\n\t" \
4731 ".cfi_restore_state\n\t"
4732 #else
4733 # define __FRAME_POINTER
4734 # define VALGRIND_CFI_PROLOGUE \
4735 "lgr 1,%1\n\t"
4736 # define VALGRIND_CFI_EPILOGUE
4737 #endif
4739 /* Nb: On s390 the stack pointer is properly aligned *at all times*
4740 according to the s390 GCC maintainer. (The ABI specification is not
4741 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4742 VALGRIND_RESTORE_STACK are not defined here. */
4744 /* These regs are trashed by the hidden call. Note that we overwrite
4745 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4746 function a proper return address. All others are ABI defined call
4747 clobbers. */
4748 #define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
4749 "f0","f1","f2","f3","f4","f5","f6","f7"
/* Nb: Although r11 is modified in the asm snippets below (inside
   VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
   two reasons:
   (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
       modified
   (2) GCC will complain that r11 cannot appear inside a clobber section,
       when compiled with -O -fno-omit-frame-pointer
 */
4760 #define CALL_FN_W_v(lval, orig) \
4761 do { \
4762 volatile OrigFn _orig = (orig); \
4763 volatile unsigned long _argvec[1]; \
4764 volatile unsigned long _res; \
4765 _argvec[0] = (unsigned long)_orig.nraddr; \
4766 __asm__ volatile( \
4767 VALGRIND_CFI_PROLOGUE \
4768 "aghi 15,-160\n\t" \
4769 "lg 1, 0(1)\n\t" /* target->r1 */ \
4770 VALGRIND_CALL_NOREDIR_R1 \
4771 "lgr %0, 2\n\t" \
4772 "aghi 15,160\n\t" \
4773 VALGRIND_CFI_EPILOGUE \
4774 : /*out*/ "=d" (_res) \
4775 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
4776 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4777 ); \
4778 lval = (__typeof__(lval)) _res; \
4779 } while (0)
4781 /* The call abi has the arguments in r2-r6 and stack */
4782 #define CALL_FN_W_W(lval, orig, arg1) \
4783 do { \
4784 volatile OrigFn _orig = (orig); \
4785 volatile unsigned long _argvec[2]; \
4786 volatile unsigned long _res; \
4787 _argvec[0] = (unsigned long)_orig.nraddr; \
4788 _argvec[1] = (unsigned long)arg1; \
4789 __asm__ volatile( \
4790 VALGRIND_CFI_PROLOGUE \
4791 "aghi 15,-160\n\t" \
4792 "lg 2, 8(1)\n\t" \
4793 "lg 1, 0(1)\n\t" \
4794 VALGRIND_CALL_NOREDIR_R1 \
4795 "lgr %0, 2\n\t" \
4796 "aghi 15,160\n\t" \
4797 VALGRIND_CFI_EPILOGUE \
4798 : /*out*/ "=d" (_res) \
4799 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4800 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4801 ); \
4802 lval = (__typeof__(lval)) _res; \
4803 } while (0)
4805 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
4806 do { \
4807 volatile OrigFn _orig = (orig); \
4808 volatile unsigned long _argvec[3]; \
4809 volatile unsigned long _res; \
4810 _argvec[0] = (unsigned long)_orig.nraddr; \
4811 _argvec[1] = (unsigned long)arg1; \
4812 _argvec[2] = (unsigned long)arg2; \
4813 __asm__ volatile( \
4814 VALGRIND_CFI_PROLOGUE \
4815 "aghi 15,-160\n\t" \
4816 "lg 2, 8(1)\n\t" \
4817 "lg 3,16(1)\n\t" \
4818 "lg 1, 0(1)\n\t" \
4819 VALGRIND_CALL_NOREDIR_R1 \
4820 "lgr %0, 2\n\t" \
4821 "aghi 15,160\n\t" \
4822 VALGRIND_CFI_EPILOGUE \
4823 : /*out*/ "=d" (_res) \
4824 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4825 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4826 ); \
4827 lval = (__typeof__(lval)) _res; \
4828 } while (0)
4830 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
4831 do { \
4832 volatile OrigFn _orig = (orig); \
4833 volatile unsigned long _argvec[4]; \
4834 volatile unsigned long _res; \
4835 _argvec[0] = (unsigned long)_orig.nraddr; \
4836 _argvec[1] = (unsigned long)arg1; \
4837 _argvec[2] = (unsigned long)arg2; \
4838 _argvec[3] = (unsigned long)arg3; \
4839 __asm__ volatile( \
4840 VALGRIND_CFI_PROLOGUE \
4841 "aghi 15,-160\n\t" \
4842 "lg 2, 8(1)\n\t" \
4843 "lg 3,16(1)\n\t" \
4844 "lg 4,24(1)\n\t" \
4845 "lg 1, 0(1)\n\t" \
4846 VALGRIND_CALL_NOREDIR_R1 \
4847 "lgr %0, 2\n\t" \
4848 "aghi 15,160\n\t" \
4849 VALGRIND_CFI_EPILOGUE \
4850 : /*out*/ "=d" (_res) \
4851 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4852 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4853 ); \
4854 lval = (__typeof__(lval)) _res; \
4855 } while (0)
4857 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
4858 do { \
4859 volatile OrigFn _orig = (orig); \
4860 volatile unsigned long _argvec[5]; \
4861 volatile unsigned long _res; \
4862 _argvec[0] = (unsigned long)_orig.nraddr; \
4863 _argvec[1] = (unsigned long)arg1; \
4864 _argvec[2] = (unsigned long)arg2; \
4865 _argvec[3] = (unsigned long)arg3; \
4866 _argvec[4] = (unsigned long)arg4; \
4867 __asm__ volatile( \
4868 VALGRIND_CFI_PROLOGUE \
4869 "aghi 15,-160\n\t" \
4870 "lg 2, 8(1)\n\t" \
4871 "lg 3,16(1)\n\t" \
4872 "lg 4,24(1)\n\t" \
4873 "lg 5,32(1)\n\t" \
4874 "lg 1, 0(1)\n\t" \
4875 VALGRIND_CALL_NOREDIR_R1 \
4876 "lgr %0, 2\n\t" \
4877 "aghi 15,160\n\t" \
4878 VALGRIND_CFI_EPILOGUE \
4879 : /*out*/ "=d" (_res) \
4880 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4881 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4882 ); \
4883 lval = (__typeof__(lval)) _res; \
4884 } while (0)
4886 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
4887 do { \
4888 volatile OrigFn _orig = (orig); \
4889 volatile unsigned long _argvec[6]; \
4890 volatile unsigned long _res; \
4891 _argvec[0] = (unsigned long)_orig.nraddr; \
4892 _argvec[1] = (unsigned long)arg1; \
4893 _argvec[2] = (unsigned long)arg2; \
4894 _argvec[3] = (unsigned long)arg3; \
4895 _argvec[4] = (unsigned long)arg4; \
4896 _argvec[5] = (unsigned long)arg5; \
4897 __asm__ volatile( \
4898 VALGRIND_CFI_PROLOGUE \
4899 "aghi 15,-160\n\t" \
4900 "lg 2, 8(1)\n\t" \
4901 "lg 3,16(1)\n\t" \
4902 "lg 4,24(1)\n\t" \
4903 "lg 5,32(1)\n\t" \
4904 "lg 6,40(1)\n\t" \
4905 "lg 1, 0(1)\n\t" \
4906 VALGRIND_CALL_NOREDIR_R1 \
4907 "lgr %0, 2\n\t" \
4908 "aghi 15,160\n\t" \
4909 VALGRIND_CFI_EPILOGUE \
4910 : /*out*/ "=d" (_res) \
4911 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4912 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4913 ); \
4914 lval = (__typeof__(lval)) _res; \
4915 } while (0)
4917 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4918 arg6) \
4919 do { \
4920 volatile OrigFn _orig = (orig); \
4921 volatile unsigned long _argvec[7]; \
4922 volatile unsigned long _res; \
4923 _argvec[0] = (unsigned long)_orig.nraddr; \
4924 _argvec[1] = (unsigned long)arg1; \
4925 _argvec[2] = (unsigned long)arg2; \
4926 _argvec[3] = (unsigned long)arg3; \
4927 _argvec[4] = (unsigned long)arg4; \
4928 _argvec[5] = (unsigned long)arg5; \
4929 _argvec[6] = (unsigned long)arg6; \
4930 __asm__ volatile( \
4931 VALGRIND_CFI_PROLOGUE \
4932 "aghi 15,-168\n\t" \
4933 "lg 2, 8(1)\n\t" \
4934 "lg 3,16(1)\n\t" \
4935 "lg 4,24(1)\n\t" \
4936 "lg 5,32(1)\n\t" \
4937 "lg 6,40(1)\n\t" \
4938 "mvc 160(8,15), 48(1)\n\t" \
4939 "lg 1, 0(1)\n\t" \
4940 VALGRIND_CALL_NOREDIR_R1 \
4941 "lgr %0, 2\n\t" \
4942 "aghi 15,168\n\t" \
4943 VALGRIND_CFI_EPILOGUE \
4944 : /*out*/ "=d" (_res) \
4945 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4946 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4947 ); \
4948 lval = (__typeof__(lval)) _res; \
4949 } while (0)
4951 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4952 arg6, arg7) \
4953 do { \
4954 volatile OrigFn _orig = (orig); \
4955 volatile unsigned long _argvec[8]; \
4956 volatile unsigned long _res; \
4957 _argvec[0] = (unsigned long)_orig.nraddr; \
4958 _argvec[1] = (unsigned long)arg1; \
4959 _argvec[2] = (unsigned long)arg2; \
4960 _argvec[3] = (unsigned long)arg3; \
4961 _argvec[4] = (unsigned long)arg4; \
4962 _argvec[5] = (unsigned long)arg5; \
4963 _argvec[6] = (unsigned long)arg6; \
4964 _argvec[7] = (unsigned long)arg7; \
4965 __asm__ volatile( \
4966 VALGRIND_CFI_PROLOGUE \
4967 "aghi 15,-176\n\t" \
4968 "lg 2, 8(1)\n\t" \
4969 "lg 3,16(1)\n\t" \
4970 "lg 4,24(1)\n\t" \
4971 "lg 5,32(1)\n\t" \
4972 "lg 6,40(1)\n\t" \
4973 "mvc 160(8,15), 48(1)\n\t" \
4974 "mvc 168(8,15), 56(1)\n\t" \
4975 "lg 1, 0(1)\n\t" \
4976 VALGRIND_CALL_NOREDIR_R1 \
4977 "lgr %0, 2\n\t" \
4978 "aghi 15,176\n\t" \
4979 VALGRIND_CFI_EPILOGUE \
4980 : /*out*/ "=d" (_res) \
4981 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4982 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4983 ); \
4984 lval = (__typeof__(lval)) _res; \
4985 } while (0)
4987 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4988 arg6, arg7 ,arg8) \
4989 do { \
4990 volatile OrigFn _orig = (orig); \
4991 volatile unsigned long _argvec[9]; \
4992 volatile unsigned long _res; \
4993 _argvec[0] = (unsigned long)_orig.nraddr; \
4994 _argvec[1] = (unsigned long)arg1; \
4995 _argvec[2] = (unsigned long)arg2; \
4996 _argvec[3] = (unsigned long)arg3; \
4997 _argvec[4] = (unsigned long)arg4; \
4998 _argvec[5] = (unsigned long)arg5; \
4999 _argvec[6] = (unsigned long)arg6; \
5000 _argvec[7] = (unsigned long)arg7; \
5001 _argvec[8] = (unsigned long)arg8; \
5002 __asm__ volatile( \
5003 VALGRIND_CFI_PROLOGUE \
5004 "aghi 15,-184\n\t" \
5005 "lg 2, 8(1)\n\t" \
5006 "lg 3,16(1)\n\t" \
5007 "lg 4,24(1)\n\t" \
5008 "lg 5,32(1)\n\t" \
5009 "lg 6,40(1)\n\t" \
5010 "mvc 160(8,15), 48(1)\n\t" \
5011 "mvc 168(8,15), 56(1)\n\t" \
5012 "mvc 176(8,15), 64(1)\n\t" \
5013 "lg 1, 0(1)\n\t" \
5014 VALGRIND_CALL_NOREDIR_R1 \
5015 "lgr %0, 2\n\t" \
5016 "aghi 15,184\n\t" \
5017 VALGRIND_CFI_EPILOGUE \
5018 : /*out*/ "=d" (_res) \
5019 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5020 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5021 ); \
5022 lval = (__typeof__(lval)) _res; \
5023 } while (0)
5025 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5026 arg6, arg7 ,arg8, arg9) \
5027 do { \
5028 volatile OrigFn _orig = (orig); \
5029 volatile unsigned long _argvec[10]; \
5030 volatile unsigned long _res; \
5031 _argvec[0] = (unsigned long)_orig.nraddr; \
5032 _argvec[1] = (unsigned long)arg1; \
5033 _argvec[2] = (unsigned long)arg2; \
5034 _argvec[3] = (unsigned long)arg3; \
5035 _argvec[4] = (unsigned long)arg4; \
5036 _argvec[5] = (unsigned long)arg5; \
5037 _argvec[6] = (unsigned long)arg6; \
5038 _argvec[7] = (unsigned long)arg7; \
5039 _argvec[8] = (unsigned long)arg8; \
5040 _argvec[9] = (unsigned long)arg9; \
5041 __asm__ volatile( \
5042 VALGRIND_CFI_PROLOGUE \
5043 "aghi 15,-192\n\t" \
5044 "lg 2, 8(1)\n\t" \
5045 "lg 3,16(1)\n\t" \
5046 "lg 4,24(1)\n\t" \
5047 "lg 5,32(1)\n\t" \
5048 "lg 6,40(1)\n\t" \
5049 "mvc 160(8,15), 48(1)\n\t" \
5050 "mvc 168(8,15), 56(1)\n\t" \
5051 "mvc 176(8,15), 64(1)\n\t" \
5052 "mvc 184(8,15), 72(1)\n\t" \
5053 "lg 1, 0(1)\n\t" \
5054 VALGRIND_CALL_NOREDIR_R1 \
5055 "lgr %0, 2\n\t" \
5056 "aghi 15,192\n\t" \
5057 VALGRIND_CFI_EPILOGUE \
5058 : /*out*/ "=d" (_res) \
5059 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5060 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5061 ); \
5062 lval = (__typeof__(lval)) _res; \
5063 } while (0)
5065 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5066 arg6, arg7 ,arg8, arg9, arg10) \
5067 do { \
5068 volatile OrigFn _orig = (orig); \
5069 volatile unsigned long _argvec[11]; \
5070 volatile unsigned long _res; \
5071 _argvec[0] = (unsigned long)_orig.nraddr; \
5072 _argvec[1] = (unsigned long)arg1; \
5073 _argvec[2] = (unsigned long)arg2; \
5074 _argvec[3] = (unsigned long)arg3; \
5075 _argvec[4] = (unsigned long)arg4; \
5076 _argvec[5] = (unsigned long)arg5; \
5077 _argvec[6] = (unsigned long)arg6; \
5078 _argvec[7] = (unsigned long)arg7; \
5079 _argvec[8] = (unsigned long)arg8; \
5080 _argvec[9] = (unsigned long)arg9; \
5081 _argvec[10] = (unsigned long)arg10; \
5082 __asm__ volatile( \
5083 VALGRIND_CFI_PROLOGUE \
5084 "aghi 15,-200\n\t" \
5085 "lg 2, 8(1)\n\t" \
5086 "lg 3,16(1)\n\t" \
5087 "lg 4,24(1)\n\t" \
5088 "lg 5,32(1)\n\t" \
5089 "lg 6,40(1)\n\t" \
5090 "mvc 160(8,15), 48(1)\n\t" \
5091 "mvc 168(8,15), 56(1)\n\t" \
5092 "mvc 176(8,15), 64(1)\n\t" \
5093 "mvc 184(8,15), 72(1)\n\t" \
5094 "mvc 192(8,15), 80(1)\n\t" \
5095 "lg 1, 0(1)\n\t" \
5096 VALGRIND_CALL_NOREDIR_R1 \
5097 "lgr %0, 2\n\t" \
5098 "aghi 15,200\n\t" \
5099 VALGRIND_CFI_EPILOGUE \
5100 : /*out*/ "=d" (_res) \
5101 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5102 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5103 ); \
5104 lval = (__typeof__(lval)) _res; \
5105 } while (0)
5107 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5108 arg6, arg7 ,arg8, arg9, arg10, arg11) \
5109 do { \
5110 volatile OrigFn _orig = (orig); \
5111 volatile unsigned long _argvec[12]; \
5112 volatile unsigned long _res; \
5113 _argvec[0] = (unsigned long)_orig.nraddr; \
5114 _argvec[1] = (unsigned long)arg1; \
5115 _argvec[2] = (unsigned long)arg2; \
5116 _argvec[3] = (unsigned long)arg3; \
5117 _argvec[4] = (unsigned long)arg4; \
5118 _argvec[5] = (unsigned long)arg5; \
5119 _argvec[6] = (unsigned long)arg6; \
5120 _argvec[7] = (unsigned long)arg7; \
5121 _argvec[8] = (unsigned long)arg8; \
5122 _argvec[9] = (unsigned long)arg9; \
5123 _argvec[10] = (unsigned long)arg10; \
5124 _argvec[11] = (unsigned long)arg11; \
5125 __asm__ volatile( \
5126 VALGRIND_CFI_PROLOGUE \
5127 "aghi 15,-208\n\t" \
5128 "lg 2, 8(1)\n\t" \
5129 "lg 3,16(1)\n\t" \
5130 "lg 4,24(1)\n\t" \
5131 "lg 5,32(1)\n\t" \
5132 "lg 6,40(1)\n\t" \
5133 "mvc 160(8,15), 48(1)\n\t" \
5134 "mvc 168(8,15), 56(1)\n\t" \
5135 "mvc 176(8,15), 64(1)\n\t" \
5136 "mvc 184(8,15), 72(1)\n\t" \
5137 "mvc 192(8,15), 80(1)\n\t" \
5138 "mvc 200(8,15), 88(1)\n\t" \
5139 "lg 1, 0(1)\n\t" \
5140 VALGRIND_CALL_NOREDIR_R1 \
5141 "lgr %0, 2\n\t" \
5142 "aghi 15,208\n\t" \
5143 VALGRIND_CFI_EPILOGUE \
5144 : /*out*/ "=d" (_res) \
5145 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5146 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5147 ); \
5148 lval = (__typeof__(lval)) _res; \
5149 } while (0)
5151 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5152 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
5153 do { \
5154 volatile OrigFn _orig = (orig); \
5155 volatile unsigned long _argvec[13]; \
5156 volatile unsigned long _res; \
5157 _argvec[0] = (unsigned long)_orig.nraddr; \
5158 _argvec[1] = (unsigned long)arg1; \
5159 _argvec[2] = (unsigned long)arg2; \
5160 _argvec[3] = (unsigned long)arg3; \
5161 _argvec[4] = (unsigned long)arg4; \
5162 _argvec[5] = (unsigned long)arg5; \
5163 _argvec[6] = (unsigned long)arg6; \
5164 _argvec[7] = (unsigned long)arg7; \
5165 _argvec[8] = (unsigned long)arg8; \
5166 _argvec[9] = (unsigned long)arg9; \
5167 _argvec[10] = (unsigned long)arg10; \
5168 _argvec[11] = (unsigned long)arg11; \
5169 _argvec[12] = (unsigned long)arg12; \
5170 __asm__ volatile( \
5171 VALGRIND_CFI_PROLOGUE \
5172 "aghi 15,-216\n\t" \
5173 "lg 2, 8(1)\n\t" \
5174 "lg 3,16(1)\n\t" \
5175 "lg 4,24(1)\n\t" \
5176 "lg 5,32(1)\n\t" \
5177 "lg 6,40(1)\n\t" \
5178 "mvc 160(8,15), 48(1)\n\t" \
5179 "mvc 168(8,15), 56(1)\n\t" \
5180 "mvc 176(8,15), 64(1)\n\t" \
5181 "mvc 184(8,15), 72(1)\n\t" \
5182 "mvc 192(8,15), 80(1)\n\t" \
5183 "mvc 200(8,15), 88(1)\n\t" \
5184 "mvc 208(8,15), 96(1)\n\t" \
5185 "lg 1, 0(1)\n\t" \
5186 VALGRIND_CALL_NOREDIR_R1 \
5187 "lgr %0, 2\n\t" \
5188 "aghi 15,216\n\t" \
5189 VALGRIND_CFI_EPILOGUE \
5190 : /*out*/ "=d" (_res) \
5191 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5192 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5193 ); \
5194 lval = (__typeof__(lval)) _res; \
5195 } while (0)
5198 #endif /* PLAT_s390x_linux */
5200 /* ------------------------- mips32-linux ----------------------- */
5202 #if defined(PLAT_mips32_linux)
5204 /* These regs are trashed by the hidden call. */
5205 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5206 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5207 "$25", "$31"
5209 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5210 long) == 4. */
5212 #define CALL_FN_W_v(lval, orig) \
5213 do { \
5214 volatile OrigFn _orig = (orig); \
5215 volatile unsigned long _argvec[1]; \
5216 volatile unsigned long _res; \
5217 _argvec[0] = (unsigned long)_orig.nraddr; \
5218 __asm__ volatile( \
5219 "subu $29, $29, 8 \n\t" \
5220 "sw $28, 0($29) \n\t" \
5221 "sw $31, 4($29) \n\t" \
5222 "subu $29, $29, 16 \n\t" \
5223 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5224 VALGRIND_CALL_NOREDIR_T9 \
5225 "addu $29, $29, 16\n\t" \
5226 "lw $28, 0($29) \n\t" \
5227 "lw $31, 4($29) \n\t" \
5228 "addu $29, $29, 8 \n\t" \
5229 "move %0, $2\n" \
5230 : /*out*/ "=r" (_res) \
5231 : /*in*/ "0" (&_argvec[0]) \
5232 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5233 ); \
5234 lval = (__typeof__(lval)) _res; \
5235 } while (0)
5237 #define CALL_FN_W_W(lval, orig, arg1) \
5238 do { \
5239 volatile OrigFn _orig = (orig); \
5240 volatile unsigned long _argvec[2]; \
5241 volatile unsigned long _res; \
5242 _argvec[0] = (unsigned long)_orig.nraddr; \
5243 _argvec[1] = (unsigned long)(arg1); \
5244 __asm__ volatile( \
5245 "subu $29, $29, 8 \n\t" \
5246 "sw $28, 0($29) \n\t" \
5247 "sw $31, 4($29) \n\t" \
5248 "subu $29, $29, 16 \n\t" \
5249 "lw $4, 4(%1) \n\t" /* arg1*/ \
5250 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5251 VALGRIND_CALL_NOREDIR_T9 \
5252 "addu $29, $29, 16 \n\t" \
5253 "lw $28, 0($29) \n\t" \
5254 "lw $31, 4($29) \n\t" \
5255 "addu $29, $29, 8 \n\t" \
5256 "move %0, $2\n" \
5257 : /*out*/ "=r" (_res) \
5258 : /*in*/ "0" (&_argvec[0]) \
5259 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5260 ); \
5261 lval = (__typeof__(lval)) _res; \
5262 } while (0)
5264 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5265 do { \
5266 volatile OrigFn _orig = (orig); \
5267 volatile unsigned long _argvec[3]; \
5268 volatile unsigned long _res; \
5269 _argvec[0] = (unsigned long)_orig.nraddr; \
5270 _argvec[1] = (unsigned long)(arg1); \
5271 _argvec[2] = (unsigned long)(arg2); \
5272 __asm__ volatile( \
5273 "subu $29, $29, 8 \n\t" \
5274 "sw $28, 0($29) \n\t" \
5275 "sw $31, 4($29) \n\t" \
5276 "subu $29, $29, 16 \n\t" \
5277 "lw $4, 4(%1) \n\t" \
5278 "lw $5, 8(%1) \n\t" \
5279 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5280 VALGRIND_CALL_NOREDIR_T9 \
5281 "addu $29, $29, 16 \n\t" \
5282 "lw $28, 0($29) \n\t" \
5283 "lw $31, 4($29) \n\t" \
5284 "addu $29, $29, 8 \n\t" \
5285 "move %0, $2\n" \
5286 : /*out*/ "=r" (_res) \
5287 : /*in*/ "0" (&_argvec[0]) \
5288 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5289 ); \
5290 lval = (__typeof__(lval)) _res; \
5291 } while (0)
5293 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5294 do { \
5295 volatile OrigFn _orig = (orig); \
5296 volatile unsigned long _argvec[4]; \
5297 volatile unsigned long _res; \
5298 _argvec[0] = (unsigned long)_orig.nraddr; \
5299 _argvec[1] = (unsigned long)(arg1); \
5300 _argvec[2] = (unsigned long)(arg2); \
5301 _argvec[3] = (unsigned long)(arg3); \
5302 __asm__ volatile( \
5303 "subu $29, $29, 8 \n\t" \
5304 "sw $28, 0($29) \n\t" \
5305 "sw $31, 4($29) \n\t" \
5306 "subu $29, $29, 16 \n\t" \
5307 "lw $4, 4(%1) \n\t" \
5308 "lw $5, 8(%1) \n\t" \
5309 "lw $6, 12(%1) \n\t" \
5310 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5311 VALGRIND_CALL_NOREDIR_T9 \
5312 "addu $29, $29, 16 \n\t" \
5313 "lw $28, 0($29) \n\t" \
5314 "lw $31, 4($29) \n\t" \
5315 "addu $29, $29, 8 \n\t" \
5316 "move %0, $2\n" \
5317 : /*out*/ "=r" (_res) \
5318 : /*in*/ "0" (&_argvec[0]) \
5319 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5320 ); \
5321 lval = (__typeof__(lval)) _res; \
5322 } while (0)
5324 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5325 do { \
5326 volatile OrigFn _orig = (orig); \
5327 volatile unsigned long _argvec[5]; \
5328 volatile unsigned long _res; \
5329 _argvec[0] = (unsigned long)_orig.nraddr; \
5330 _argvec[1] = (unsigned long)(arg1); \
5331 _argvec[2] = (unsigned long)(arg2); \
5332 _argvec[3] = (unsigned long)(arg3); \
5333 _argvec[4] = (unsigned long)(arg4); \
5334 __asm__ volatile( \
5335 "subu $29, $29, 8 \n\t" \
5336 "sw $28, 0($29) \n\t" \
5337 "sw $31, 4($29) \n\t" \
5338 "subu $29, $29, 16 \n\t" \
5339 "lw $4, 4(%1) \n\t" \
5340 "lw $5, 8(%1) \n\t" \
5341 "lw $6, 12(%1) \n\t" \
5342 "lw $7, 16(%1) \n\t" \
5343 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5344 VALGRIND_CALL_NOREDIR_T9 \
5345 "addu $29, $29, 16 \n\t" \
5346 "lw $28, 0($29) \n\t" \
5347 "lw $31, 4($29) \n\t" \
5348 "addu $29, $29, 8 \n\t" \
5349 "move %0, $2\n" \
5350 : /*out*/ "=r" (_res) \
5351 : /*in*/ "0" (&_argvec[0]) \
5352 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5353 ); \
5354 lval = (__typeof__(lval)) _res; \
5355 } while (0)
5357 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5358 do { \
5359 volatile OrigFn _orig = (orig); \
5360 volatile unsigned long _argvec[6]; \
5361 volatile unsigned long _res; \
5362 _argvec[0] = (unsigned long)_orig.nraddr; \
5363 _argvec[1] = (unsigned long)(arg1); \
5364 _argvec[2] = (unsigned long)(arg2); \
5365 _argvec[3] = (unsigned long)(arg3); \
5366 _argvec[4] = (unsigned long)(arg4); \
5367 _argvec[5] = (unsigned long)(arg5); \
5368 __asm__ volatile( \
5369 "subu $29, $29, 8 \n\t" \
5370 "sw $28, 0($29) \n\t" \
5371 "sw $31, 4($29) \n\t" \
5372 "lw $4, 20(%1) \n\t" \
5373 "subu $29, $29, 24\n\t" \
5374 "sw $4, 16($29) \n\t" \
5375 "lw $4, 4(%1) \n\t" \
5376 "lw $5, 8(%1) \n\t" \
5377 "lw $6, 12(%1) \n\t" \
5378 "lw $7, 16(%1) \n\t" \
5379 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5380 VALGRIND_CALL_NOREDIR_T9 \
5381 "addu $29, $29, 24 \n\t" \
5382 "lw $28, 0($29) \n\t" \
5383 "lw $31, 4($29) \n\t" \
5384 "addu $29, $29, 8 \n\t" \
5385 "move %0, $2\n" \
5386 : /*out*/ "=r" (_res) \
5387 : /*in*/ "0" (&_argvec[0]) \
5388 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5389 ); \
5390 lval = (__typeof__(lval)) _res; \
5391 } while (0)
5392 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5393 do { \
5394 volatile OrigFn _orig = (orig); \
5395 volatile unsigned long _argvec[7]; \
5396 volatile unsigned long _res; \
5397 _argvec[0] = (unsigned long)_orig.nraddr; \
5398 _argvec[1] = (unsigned long)(arg1); \
5399 _argvec[2] = (unsigned long)(arg2); \
5400 _argvec[3] = (unsigned long)(arg3); \
5401 _argvec[4] = (unsigned long)(arg4); \
5402 _argvec[5] = (unsigned long)(arg5); \
5403 _argvec[6] = (unsigned long)(arg6); \
5404 __asm__ volatile( \
5405 "subu $29, $29, 8 \n\t" \
5406 "sw $28, 0($29) \n\t" \
5407 "sw $31, 4($29) \n\t" \
5408 "lw $4, 20(%1) \n\t" \
5409 "subu $29, $29, 32\n\t" \
5410 "sw $4, 16($29) \n\t" \
5411 "lw $4, 24(%1) \n\t" \
5412 "nop\n\t" \
5413 "sw $4, 20($29) \n\t" \
5414 "lw $4, 4(%1) \n\t" \
5415 "lw $5, 8(%1) \n\t" \
5416 "lw $6, 12(%1) \n\t" \
5417 "lw $7, 16(%1) \n\t" \
5418 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5419 VALGRIND_CALL_NOREDIR_T9 \
5420 "addu $29, $29, 32 \n\t" \
5421 "lw $28, 0($29) \n\t" \
5422 "lw $31, 4($29) \n\t" \
5423 "addu $29, $29, 8 \n\t" \
5424 "move %0, $2\n" \
5425 : /*out*/ "=r" (_res) \
5426 : /*in*/ "0" (&_argvec[0]) \
5427 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5428 ); \
5429 lval = (__typeof__(lval)) _res; \
5430 } while (0)
5432 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5433 arg7) \
5434 do { \
5435 volatile OrigFn _orig = (orig); \
5436 volatile unsigned long _argvec[8]; \
5437 volatile unsigned long _res; \
5438 _argvec[0] = (unsigned long)_orig.nraddr; \
5439 _argvec[1] = (unsigned long)(arg1); \
5440 _argvec[2] = (unsigned long)(arg2); \
5441 _argvec[3] = (unsigned long)(arg3); \
5442 _argvec[4] = (unsigned long)(arg4); \
5443 _argvec[5] = (unsigned long)(arg5); \
5444 _argvec[6] = (unsigned long)(arg6); \
5445 _argvec[7] = (unsigned long)(arg7); \
5446 __asm__ volatile( \
5447 "subu $29, $29, 8 \n\t" \
5448 "sw $28, 0($29) \n\t" \
5449 "sw $31, 4($29) \n\t" \
5450 "lw $4, 20(%1) \n\t" \
5451 "subu $29, $29, 32\n\t" \
5452 "sw $4, 16($29) \n\t" \
5453 "lw $4, 24(%1) \n\t" \
5454 "sw $4, 20($29) \n\t" \
5455 "lw $4, 28(%1) \n\t" \
5456 "sw $4, 24($29) \n\t" \
5457 "lw $4, 4(%1) \n\t" \
5458 "lw $5, 8(%1) \n\t" \
5459 "lw $6, 12(%1) \n\t" \
5460 "lw $7, 16(%1) \n\t" \
5461 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5462 VALGRIND_CALL_NOREDIR_T9 \
5463 "addu $29, $29, 32 \n\t" \
5464 "lw $28, 0($29) \n\t" \
5465 "lw $31, 4($29) \n\t" \
5466 "addu $29, $29, 8 \n\t" \
5467 "move %0, $2\n" \
5468 : /*out*/ "=r" (_res) \
5469 : /*in*/ "0" (&_argvec[0]) \
5470 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5471 ); \
5472 lval = (__typeof__(lval)) _res; \
5473 } while (0)
5475 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5476 arg7,arg8) \
5477 do { \
5478 volatile OrigFn _orig = (orig); \
5479 volatile unsigned long _argvec[9]; \
5480 volatile unsigned long _res; \
5481 _argvec[0] = (unsigned long)_orig.nraddr; \
5482 _argvec[1] = (unsigned long)(arg1); \
5483 _argvec[2] = (unsigned long)(arg2); \
5484 _argvec[3] = (unsigned long)(arg3); \
5485 _argvec[4] = (unsigned long)(arg4); \
5486 _argvec[5] = (unsigned long)(arg5); \
5487 _argvec[6] = (unsigned long)(arg6); \
5488 _argvec[7] = (unsigned long)(arg7); \
5489 _argvec[8] = (unsigned long)(arg8); \
5490 __asm__ volatile( \
5491 "subu $29, $29, 8 \n\t" \
5492 "sw $28, 0($29) \n\t" \
5493 "sw $31, 4($29) \n\t" \
5494 "lw $4, 20(%1) \n\t" \
5495 "subu $29, $29, 40\n\t" \
5496 "sw $4, 16($29) \n\t" \
5497 "lw $4, 24(%1) \n\t" \
5498 "sw $4, 20($29) \n\t" \
5499 "lw $4, 28(%1) \n\t" \
5500 "sw $4, 24($29) \n\t" \
5501 "lw $4, 32(%1) \n\t" \
5502 "sw $4, 28($29) \n\t" \
5503 "lw $4, 4(%1) \n\t" \
5504 "lw $5, 8(%1) \n\t" \
5505 "lw $6, 12(%1) \n\t" \
5506 "lw $7, 16(%1) \n\t" \
5507 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5508 VALGRIND_CALL_NOREDIR_T9 \
5509 "addu $29, $29, 40 \n\t" \
5510 "lw $28, 0($29) \n\t" \
5511 "lw $31, 4($29) \n\t" \
5512 "addu $29, $29, 8 \n\t" \
5513 "move %0, $2\n" \
5514 : /*out*/ "=r" (_res) \
5515 : /*in*/ "0" (&_argvec[0]) \
5516 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5517 ); \
5518 lval = (__typeof__(lval)) _res; \
5519 } while (0)
5521 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5522 arg7,arg8,arg9) \
5523 do { \
5524 volatile OrigFn _orig = (orig); \
5525 volatile unsigned long _argvec[10]; \
5526 volatile unsigned long _res; \
5527 _argvec[0] = (unsigned long)_orig.nraddr; \
5528 _argvec[1] = (unsigned long)(arg1); \
5529 _argvec[2] = (unsigned long)(arg2); \
5530 _argvec[3] = (unsigned long)(arg3); \
5531 _argvec[4] = (unsigned long)(arg4); \
5532 _argvec[5] = (unsigned long)(arg5); \
5533 _argvec[6] = (unsigned long)(arg6); \
5534 _argvec[7] = (unsigned long)(arg7); \
5535 _argvec[8] = (unsigned long)(arg8); \
5536 _argvec[9] = (unsigned long)(arg9); \
5537 __asm__ volatile( \
5538 "subu $29, $29, 8 \n\t" \
5539 "sw $28, 0($29) \n\t" \
5540 "sw $31, 4($29) \n\t" \
5541 "lw $4, 20(%1) \n\t" \
5542 "subu $29, $29, 40\n\t" \
5543 "sw $4, 16($29) \n\t" \
5544 "lw $4, 24(%1) \n\t" \
5545 "sw $4, 20($29) \n\t" \
5546 "lw $4, 28(%1) \n\t" \
5547 "sw $4, 24($29) \n\t" \
5548 "lw $4, 32(%1) \n\t" \
5549 "sw $4, 28($29) \n\t" \
5550 "lw $4, 36(%1) \n\t" \
5551 "sw $4, 32($29) \n\t" \
5552 "lw $4, 4(%1) \n\t" \
5553 "lw $5, 8(%1) \n\t" \
5554 "lw $6, 12(%1) \n\t" \
5555 "lw $7, 16(%1) \n\t" \
5556 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5557 VALGRIND_CALL_NOREDIR_T9 \
5558 "addu $29, $29, 40 \n\t" \
5559 "lw $28, 0($29) \n\t" \
5560 "lw $31, 4($29) \n\t" \
5561 "addu $29, $29, 8 \n\t" \
5562 "move %0, $2\n" \
5563 : /*out*/ "=r" (_res) \
5564 : /*in*/ "0" (&_argvec[0]) \
5565 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5566 ); \
5567 lval = (__typeof__(lval)) _res; \
5568 } while (0)
/* Call a 10-argument function through 'orig' (MIPS o32 convention):
   args 1-4 are passed in $4-$7, args 5-10 are stored on the stack
   starting at 16($29) (the first 16 bytes are the o32 argument save
   area).  $28 (gp) and $31 (ra) are saved/restored around the call,
   the target address is placed in $25 (t9), and the result is read
   back from $2 (v0). */
5570 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5571 arg7,arg8,arg9,arg10) \
5572 do { \
5573 volatile OrigFn _orig = (orig); \
5574 volatile unsigned long _argvec[11]; \
5575 volatile unsigned long _res; \
5576 _argvec[0] = (unsigned long)_orig.nraddr; \
5577 _argvec[1] = (unsigned long)(arg1); \
5578 _argvec[2] = (unsigned long)(arg2); \
5579 _argvec[3] = (unsigned long)(arg3); \
5580 _argvec[4] = (unsigned long)(arg4); \
5581 _argvec[5] = (unsigned long)(arg5); \
5582 _argvec[6] = (unsigned long)(arg6); \
5583 _argvec[7] = (unsigned long)(arg7); \
5584 _argvec[8] = (unsigned long)(arg8); \
5585 _argvec[9] = (unsigned long)(arg9); \
5586 _argvec[10] = (unsigned long)(arg10); \
5587 __asm__ volatile( \
5588 "subu $29, $29, 8 \n\t" \
5589 "sw $28, 0($29) \n\t" \
5590 "sw $31, 4($29) \n\t" \
5591 "lw $4, 20(%1) \n\t" \
5592 "subu $29, $29, 48\n\t" \
5593 "sw $4, 16($29) \n\t" \
5594 "lw $4, 24(%1) \n\t" \
5595 "sw $4, 20($29) \n\t" \
5596 "lw $4, 28(%1) \n\t" \
5597 "sw $4, 24($29) \n\t" \
5598 "lw $4, 32(%1) \n\t" \
5599 "sw $4, 28($29) \n\t" \
5600 "lw $4, 36(%1) \n\t" \
5601 "sw $4, 32($29) \n\t" \
5602 "lw $4, 40(%1) \n\t" \
5603 "sw $4, 36($29) \n\t" \
5604 "lw $4, 4(%1) \n\t" \
5605 "lw $5, 8(%1) \n\t" \
5606 "lw $6, 12(%1) \n\t" \
5607 "lw $7, 16(%1) \n\t" \
5608 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5609 VALGRIND_CALL_NOREDIR_T9 \
5610 "addu $29, $29, 48 \n\t" \
5611 "lw $28, 0($29) \n\t" \
5612 "lw $31, 4($29) \n\t" \
5613 "addu $29, $29, 8 \n\t" \
5614 "move %0, $2\n" \
5615 : /*out*/ "=r" (_res) \
5616 : /*in*/ "0" (&_argvec[0]) \
5617 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5618 ); \
5619 lval = (__typeof__(lval)) _res; \
5620 } while (0)
/* Call an 11-argument function through 'orig' (MIPS o32 convention):
   args 1-4 in $4-$7, args 5-11 spilled to the stack from 16($29) up
   (first 16 bytes are the o32 argument save area); gp/ra are saved
   around the call, target in $25 (t9), result from $2 (v0). */
5622 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5623 arg6,arg7,arg8,arg9,arg10, \
5624 arg11) \
5625 do { \
5626 volatile OrigFn _orig = (orig); \
5627 volatile unsigned long _argvec[12]; \
5628 volatile unsigned long _res; \
5629 _argvec[0] = (unsigned long)_orig.nraddr; \
5630 _argvec[1] = (unsigned long)(arg1); \
5631 _argvec[2] = (unsigned long)(arg2); \
5632 _argvec[3] = (unsigned long)(arg3); \
5633 _argvec[4] = (unsigned long)(arg4); \
5634 _argvec[5] = (unsigned long)(arg5); \
5635 _argvec[6] = (unsigned long)(arg6); \
5636 _argvec[7] = (unsigned long)(arg7); \
5637 _argvec[8] = (unsigned long)(arg8); \
5638 _argvec[9] = (unsigned long)(arg9); \
5639 _argvec[10] = (unsigned long)(arg10); \
5640 _argvec[11] = (unsigned long)(arg11); \
5641 __asm__ volatile( \
5642 "subu $29, $29, 8 \n\t" \
5643 "sw $28, 0($29) \n\t" \
5644 "sw $31, 4($29) \n\t" \
5645 "lw $4, 20(%1) \n\t" \
5646 "subu $29, $29, 48\n\t" \
5647 "sw $4, 16($29) \n\t" \
5648 "lw $4, 24(%1) \n\t" \
5649 "sw $4, 20($29) \n\t" \
5650 "lw $4, 28(%1) \n\t" \
5651 "sw $4, 24($29) \n\t" \
5652 "lw $4, 32(%1) \n\t" \
5653 "sw $4, 28($29) \n\t" \
5654 "lw $4, 36(%1) \n\t" \
5655 "sw $4, 32($29) \n\t" \
5656 "lw $4, 40(%1) \n\t" \
5657 "sw $4, 36($29) \n\t" \
5658 "lw $4, 44(%1) \n\t" \
5659 "sw $4, 40($29) \n\t" \
5660 "lw $4, 4(%1) \n\t" \
5661 "lw $5, 8(%1) \n\t" \
5662 "lw $6, 12(%1) \n\t" \
5663 "lw $7, 16(%1) \n\t" \
5664 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5665 VALGRIND_CALL_NOREDIR_T9 \
5666 "addu $29, $29, 48 \n\t" \
5667 "lw $28, 0($29) \n\t" \
5668 "lw $31, 4($29) \n\t" \
5669 "addu $29, $29, 8 \n\t" \
5670 "move %0, $2\n" \
5671 : /*out*/ "=r" (_res) \
5672 : /*in*/ "0" (&_argvec[0]) \
5673 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5674 ); \
5675 lval = (__typeof__(lval)) _res; \
5676 } while (0)
/* Call a 12-argument function through 'orig' (MIPS o32 convention):
   args 1-4 in $4-$7, args 5-12 spilled to the stack from 16($29) up
   (56-byte outgoing frame includes the o32 argument save area);
   gp/ra are saved around the call, target in $25 (t9), result from
   $2 (v0).
   NOTE(review): this macro uses the plain "r" input constraint where
   the sibling mips32 macros above use the "0" matching constraint.
   Both are correct here (%1 is only read before %0 is written), but
   the inconsistency is worth confirming/unifying upstream. */
5678 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5679 arg6,arg7,arg8,arg9,arg10, \
5680 arg11,arg12) \
5681 do { \
5682 volatile OrigFn _orig = (orig); \
5683 volatile unsigned long _argvec[13]; \
5684 volatile unsigned long _res; \
5685 _argvec[0] = (unsigned long)_orig.nraddr; \
5686 _argvec[1] = (unsigned long)(arg1); \
5687 _argvec[2] = (unsigned long)(arg2); \
5688 _argvec[3] = (unsigned long)(arg3); \
5689 _argvec[4] = (unsigned long)(arg4); \
5690 _argvec[5] = (unsigned long)(arg5); \
5691 _argvec[6] = (unsigned long)(arg6); \
5692 _argvec[7] = (unsigned long)(arg7); \
5693 _argvec[8] = (unsigned long)(arg8); \
5694 _argvec[9] = (unsigned long)(arg9); \
5695 _argvec[10] = (unsigned long)(arg10); \
5696 _argvec[11] = (unsigned long)(arg11); \
5697 _argvec[12] = (unsigned long)(arg12); \
5698 __asm__ volatile( \
5699 "subu $29, $29, 8 \n\t" \
5700 "sw $28, 0($29) \n\t" \
5701 "sw $31, 4($29) \n\t" \
5702 "lw $4, 20(%1) \n\t" \
5703 "subu $29, $29, 56\n\t" \
5704 "sw $4, 16($29) \n\t" \
5705 "lw $4, 24(%1) \n\t" \
5706 "sw $4, 20($29) \n\t" \
5707 "lw $4, 28(%1) \n\t" \
5708 "sw $4, 24($29) \n\t" \
5709 "lw $4, 32(%1) \n\t" \
5710 "sw $4, 28($29) \n\t" \
5711 "lw $4, 36(%1) \n\t" \
5712 "sw $4, 32($29) \n\t" \
5713 "lw $4, 40(%1) \n\t" \
5714 "sw $4, 36($29) \n\t" \
5715 "lw $4, 44(%1) \n\t" \
5716 "sw $4, 40($29) \n\t" \
5717 "lw $4, 48(%1) \n\t" \
5718 "sw $4, 44($29) \n\t" \
5719 "lw $4, 4(%1) \n\t" \
5720 "lw $5, 8(%1) \n\t" \
5721 "lw $6, 12(%1) \n\t" \
5722 "lw $7, 16(%1) \n\t" \
5723 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5724 VALGRIND_CALL_NOREDIR_T9 \
5725 "addu $29, $29, 56 \n\t" \
5726 "lw $28, 0($29) \n\t" \
5727 "lw $31, 4($29) \n\t" \
5728 "addu $29, $29, 8 \n\t" \
5729 "move %0, $2\n" \
5730 : /*out*/ "=r" (_res) \
5731 : /*in*/ "r" (&_argvec[0]) \
5732 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5733 ); \
5734 lval = (__typeof__(lval)) _res; \
5735 } while (0)
5737 #endif /* PLAT_mips32_linux */
5739 /* ------------------------- mips64-linux ------------------------- */
5741 #if defined(PLAT_mips64_linux)
5743 /* These regs are trashed by the hidden call. */
/* Clobber list for the asm blocks below: $2-$3 (return), $4-$11
   (arguments), $12-$15 and $24 (temps), $25 (t9, the call target)
   and $31 (ra). */
5744 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5745 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5746 "$25", "$31"
5748 /* These CALL_FN_ macros assume that on mips64-linux, sizeof(unsigned
5749 long) == 8. */
/* Call a 0-argument function through 'orig' (MIPS n64 convention):
   the target address is loaded into $25 (t9) and the result is read
   back from $2 (v0).
   NOTE(review): this macro uses the "0" matching input constraint
   while the other mips64 macros below use plain "r"; both are
   correct here, but the inconsistency could be unified. */
5751 #define CALL_FN_W_v(lval, orig) \
5752 do { \
5753 volatile OrigFn _orig = (orig); \
5754 volatile unsigned long _argvec[1]; \
5755 volatile unsigned long _res; \
5756 _argvec[0] = (unsigned long)_orig.nraddr; \
5757 __asm__ volatile( \
5758 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5759 VALGRIND_CALL_NOREDIR_T9 \
5760 "move %0, $2\n" \
5761 : /*out*/ "=r" (_res) \
5762 : /*in*/ "0" (&_argvec[0]) \
5763 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5764 ); \
5765 lval = (__typeof__(lval)) _res; \
5766 } while (0)
/* Call a 1-argument function through 'orig' (MIPS n64): arg1 in $4,
   target in $25 (t9), result from $2 (v0). */
5768 #define CALL_FN_W_W(lval, orig, arg1) \
5769 do { \
5770 volatile OrigFn _orig = (orig); \
5771 volatile unsigned long _argvec[2]; \
5772 volatile unsigned long _res; \
5773 _argvec[0] = (unsigned long)_orig.nraddr; \
5774 _argvec[1] = (unsigned long)(arg1); \
5775 __asm__ volatile( \
5776 "ld $4, 8(%1)\n\t" /* arg1*/ \
5777 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5778 VALGRIND_CALL_NOREDIR_T9 \
5779 "move %0, $2\n" \
5780 : /*out*/ "=r" (_res) \
5781 : /*in*/ "r" (&_argvec[0]) \
5782 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5783 ); \
5784 lval = (__typeof__(lval)) _res; \
5785 } while (0)
/* Call a 2-argument function through 'orig' (MIPS n64): args in
   $4-$5, target in $25 (t9), result from $2 (v0). */
5787 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5788 do { \
5789 volatile OrigFn _orig = (orig); \
5790 volatile unsigned long _argvec[3]; \
5791 volatile unsigned long _res; \
5792 _argvec[0] = (unsigned long)_orig.nraddr; \
5793 _argvec[1] = (unsigned long)(arg1); \
5794 _argvec[2] = (unsigned long)(arg2); \
5795 __asm__ volatile( \
5796 "ld $4, 8(%1)\n\t" \
5797 "ld $5, 16(%1)\n\t" \
5798 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5799 VALGRIND_CALL_NOREDIR_T9 \
5800 "move %0, $2\n" \
5801 : /*out*/ "=r" (_res) \
5802 : /*in*/ "r" (&_argvec[0]) \
5803 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5804 ); \
5805 lval = (__typeof__(lval)) _res; \
5806 } while (0)
/* Call a 3-argument function through 'orig' (MIPS n64): args in
   $4-$6, target in $25 (t9), result from $2 (v0). */
5808 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5809 do { \
5810 volatile OrigFn _orig = (orig); \
5811 volatile unsigned long _argvec[4]; \
5812 volatile unsigned long _res; \
5813 _argvec[0] = (unsigned long)_orig.nraddr; \
5814 _argvec[1] = (unsigned long)(arg1); \
5815 _argvec[2] = (unsigned long)(arg2); \
5816 _argvec[3] = (unsigned long)(arg3); \
5817 __asm__ volatile( \
5818 "ld $4, 8(%1)\n\t" \
5819 "ld $5, 16(%1)\n\t" \
5820 "ld $6, 24(%1)\n\t" \
5821 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5822 VALGRIND_CALL_NOREDIR_T9 \
5823 "move %0, $2\n" \
5824 : /*out*/ "=r" (_res) \
5825 : /*in*/ "r" (&_argvec[0]) \
5826 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5827 ); \
5828 lval = (__typeof__(lval)) _res; \
5829 } while (0)
/* Call a 4-argument function through 'orig' (MIPS n64): args in
   $4-$7, target in $25 (t9), result from $2 (v0). */
5831 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5832 do { \
5833 volatile OrigFn _orig = (orig); \
5834 volatile unsigned long _argvec[5]; \
5835 volatile unsigned long _res; \
5836 _argvec[0] = (unsigned long)_orig.nraddr; \
5837 _argvec[1] = (unsigned long)(arg1); \
5838 _argvec[2] = (unsigned long)(arg2); \
5839 _argvec[3] = (unsigned long)(arg3); \
5840 _argvec[4] = (unsigned long)(arg4); \
5841 __asm__ volatile( \
5842 "ld $4, 8(%1)\n\t" \
5843 "ld $5, 16(%1)\n\t" \
5844 "ld $6, 24(%1)\n\t" \
5845 "ld $7, 32(%1)\n\t" \
5846 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5847 VALGRIND_CALL_NOREDIR_T9 \
5848 "move %0, $2\n" \
5849 : /*out*/ "=r" (_res) \
5850 : /*in*/ "r" (&_argvec[0]) \
5851 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5852 ); \
5853 lval = (__typeof__(lval)) _res; \
5854 } while (0)
/* Call a 5-argument function through 'orig' (MIPS n64): args in
   $4-$8, target in $25 (t9), result from $2 (v0). */
5856 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5857 do { \
5858 volatile OrigFn _orig = (orig); \
5859 volatile unsigned long _argvec[6]; \
5860 volatile unsigned long _res; \
5861 _argvec[0] = (unsigned long)_orig.nraddr; \
5862 _argvec[1] = (unsigned long)(arg1); \
5863 _argvec[2] = (unsigned long)(arg2); \
5864 _argvec[3] = (unsigned long)(arg3); \
5865 _argvec[4] = (unsigned long)(arg4); \
5866 _argvec[5] = (unsigned long)(arg5); \
5867 __asm__ volatile( \
5868 "ld $4, 8(%1)\n\t" \
5869 "ld $5, 16(%1)\n\t" \
5870 "ld $6, 24(%1)\n\t" \
5871 "ld $7, 32(%1)\n\t" \
5872 "ld $8, 40(%1)\n\t" \
5873 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5874 VALGRIND_CALL_NOREDIR_T9 \
5875 "move %0, $2\n" \
5876 : /*out*/ "=r" (_res) \
5877 : /*in*/ "r" (&_argvec[0]) \
5878 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5879 ); \
5880 lval = (__typeof__(lval)) _res; \
5881 } while (0)
/* Call a 6-argument function through 'orig' (MIPS n64): args in
   $4-$9, target in $25 (t9), result from $2 (v0). */
5883 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5884 do { \
5885 volatile OrigFn _orig = (orig); \
5886 volatile unsigned long _argvec[7]; \
5887 volatile unsigned long _res; \
5888 _argvec[0] = (unsigned long)_orig.nraddr; \
5889 _argvec[1] = (unsigned long)(arg1); \
5890 _argvec[2] = (unsigned long)(arg2); \
5891 _argvec[3] = (unsigned long)(arg3); \
5892 _argvec[4] = (unsigned long)(arg4); \
5893 _argvec[5] = (unsigned long)(arg5); \
5894 _argvec[6] = (unsigned long)(arg6); \
5895 __asm__ volatile( \
5896 "ld $4, 8(%1)\n\t" \
5897 "ld $5, 16(%1)\n\t" \
5898 "ld $6, 24(%1)\n\t" \
5899 "ld $7, 32(%1)\n\t" \
5900 "ld $8, 40(%1)\n\t" \
5901 "ld $9, 48(%1)\n\t" \
5902 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5903 VALGRIND_CALL_NOREDIR_T9 \
5904 "move %0, $2\n" \
5905 : /*out*/ "=r" (_res) \
5906 : /*in*/ "r" (&_argvec[0]) \
5907 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5908 ); \
5909 lval = (__typeof__(lval)) _res; \
5910 } while (0)
/* Call a 7-argument function through 'orig' (MIPS n64): args in
   $4-$10, target in $25 (t9), result from $2 (v0). */
5912 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5913 arg7) \
5914 do { \
5915 volatile OrigFn _orig = (orig); \
5916 volatile unsigned long _argvec[8]; \
5917 volatile unsigned long _res; \
5918 _argvec[0] = (unsigned long)_orig.nraddr; \
5919 _argvec[1] = (unsigned long)(arg1); \
5920 _argvec[2] = (unsigned long)(arg2); \
5921 _argvec[3] = (unsigned long)(arg3); \
5922 _argvec[4] = (unsigned long)(arg4); \
5923 _argvec[5] = (unsigned long)(arg5); \
5924 _argvec[6] = (unsigned long)(arg6); \
5925 _argvec[7] = (unsigned long)(arg7); \
5926 __asm__ volatile( \
5927 "ld $4, 8(%1)\n\t" \
5928 "ld $5, 16(%1)\n\t" \
5929 "ld $6, 24(%1)\n\t" \
5930 "ld $7, 32(%1)\n\t" \
5931 "ld $8, 40(%1)\n\t" \
5932 "ld $9, 48(%1)\n\t" \
5933 "ld $10, 56(%1)\n\t" \
5934 "ld $25, 0(%1) \n\t" /* target->t9 */ \
5935 VALGRIND_CALL_NOREDIR_T9 \
5936 "move %0, $2\n" \
5937 : /*out*/ "=r" (_res) \
5938 : /*in*/ "r" (&_argvec[0]) \
5939 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5940 ); \
5941 lval = (__typeof__(lval)) _res; \
5942 } while (0)
/* Call an 8-argument function through 'orig' (MIPS n64): args in
   $4-$11 (all eight argument registers), target in $25 (t9),
   result from $2 (v0). */
5944 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5945 arg7,arg8) \
5946 do { \
5947 volatile OrigFn _orig = (orig); \
5948 volatile unsigned long _argvec[9]; \
5949 volatile unsigned long _res; \
5950 _argvec[0] = (unsigned long)_orig.nraddr; \
5951 _argvec[1] = (unsigned long)(arg1); \
5952 _argvec[2] = (unsigned long)(arg2); \
5953 _argvec[3] = (unsigned long)(arg3); \
5954 _argvec[4] = (unsigned long)(arg4); \
5955 _argvec[5] = (unsigned long)(arg5); \
5956 _argvec[6] = (unsigned long)(arg6); \
5957 _argvec[7] = (unsigned long)(arg7); \
5958 _argvec[8] = (unsigned long)(arg8); \
5959 __asm__ volatile( \
5960 "ld $4, 8(%1)\n\t" \
5961 "ld $5, 16(%1)\n\t" \
5962 "ld $6, 24(%1)\n\t" \
5963 "ld $7, 32(%1)\n\t" \
5964 "ld $8, 40(%1)\n\t" \
5965 "ld $9, 48(%1)\n\t" \
5966 "ld $10, 56(%1)\n\t" \
5967 "ld $11, 64(%1)\n\t" \
5968 "ld $25, 0(%1) \n\t" /* target->t9 */ \
5969 VALGRIND_CALL_NOREDIR_T9 \
5970 "move %0, $2\n" \
5971 : /*out*/ "=r" (_res) \
5972 : /*in*/ "r" (&_argvec[0]) \
5973 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5974 ); \
5975 lval = (__typeof__(lval)) _res; \
5976 } while (0)
/* Call a 9-argument function through 'orig' (MIPS n64): args 1-8 in
   $4-$11, arg9 pushed in an 8-byte stack slot at 0($29); target in
   $25 (t9), result from $2 (v0). */
5978 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5979 arg7,arg8,arg9) \
5980 do { \
5981 volatile OrigFn _orig = (orig); \
5982 volatile unsigned long _argvec[10]; \
5983 volatile unsigned long _res; \
5984 _argvec[0] = (unsigned long)_orig.nraddr; \
5985 _argvec[1] = (unsigned long)(arg1); \
5986 _argvec[2] = (unsigned long)(arg2); \
5987 _argvec[3] = (unsigned long)(arg3); \
5988 _argvec[4] = (unsigned long)(arg4); \
5989 _argvec[5] = (unsigned long)(arg5); \
5990 _argvec[6] = (unsigned long)(arg6); \
5991 _argvec[7] = (unsigned long)(arg7); \
5992 _argvec[8] = (unsigned long)(arg8); \
5993 _argvec[9] = (unsigned long)(arg9); \
5994 __asm__ volatile( \
5995 "dsubu $29, $29, 8\n\t" \
5996 "ld $4, 72(%1)\n\t" \
5997 "sd $4, 0($29)\n\t" \
5998 "ld $4, 8(%1)\n\t" \
5999 "ld $5, 16(%1)\n\t" \
6000 "ld $6, 24(%1)\n\t" \
6001 "ld $7, 32(%1)\n\t" \
6002 "ld $8, 40(%1)\n\t" \
6003 "ld $9, 48(%1)\n\t" \
6004 "ld $10, 56(%1)\n\t" \
6005 "ld $11, 64(%1)\n\t" \
6006 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6007 VALGRIND_CALL_NOREDIR_T9 \
6008 "daddu $29, $29, 8\n\t" \
6009 "move %0, $2\n" \
6010 : /*out*/ "=r" (_res) \
6011 : /*in*/ "r" (&_argvec[0]) \
6012 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6013 ); \
6014 lval = (__typeof__(lval)) _res; \
6015 } while (0)
/* Call a 10-argument function through 'orig' (MIPS n64): args 1-8 in
   $4-$11, args 9-10 pushed in a 16-byte stack frame; target in $25
   (t9), result from $2 (v0). */
6017 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6018 arg7,arg8,arg9,arg10) \
6019 do { \
6020 volatile OrigFn _orig = (orig); \
6021 volatile unsigned long _argvec[11]; \
6022 volatile unsigned long _res; \
6023 _argvec[0] = (unsigned long)_orig.nraddr; \
6024 _argvec[1] = (unsigned long)(arg1); \
6025 _argvec[2] = (unsigned long)(arg2); \
6026 _argvec[3] = (unsigned long)(arg3); \
6027 _argvec[4] = (unsigned long)(arg4); \
6028 _argvec[5] = (unsigned long)(arg5); \
6029 _argvec[6] = (unsigned long)(arg6); \
6030 _argvec[7] = (unsigned long)(arg7); \
6031 _argvec[8] = (unsigned long)(arg8); \
6032 _argvec[9] = (unsigned long)(arg9); \
6033 _argvec[10] = (unsigned long)(arg10); \
6034 __asm__ volatile( \
6035 "dsubu $29, $29, 16\n\t" \
6036 "ld $4, 72(%1)\n\t" \
6037 "sd $4, 0($29)\n\t" \
6038 "ld $4, 80(%1)\n\t" \
6039 "sd $4, 8($29)\n\t" \
6040 "ld $4, 8(%1)\n\t" \
6041 "ld $5, 16(%1)\n\t" \
6042 "ld $6, 24(%1)\n\t" \
6043 "ld $7, 32(%1)\n\t" \
6044 "ld $8, 40(%1)\n\t" \
6045 "ld $9, 48(%1)\n\t" \
6046 "ld $10, 56(%1)\n\t" \
6047 "ld $11, 64(%1)\n\t" \
6048 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6049 VALGRIND_CALL_NOREDIR_T9 \
6050 "daddu $29, $29, 16\n\t" \
6051 "move %0, $2\n" \
6052 : /*out*/ "=r" (_res) \
6053 : /*in*/ "r" (&_argvec[0]) \
6054 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6055 ); \
6056 lval = (__typeof__(lval)) _res; \
6057 } while (0)
/* Call an 11-argument function through 'orig' (MIPS n64): args 1-8
   in $4-$11, args 9-11 pushed in a 24-byte stack frame; target in
   $25 (t9), result from $2 (v0). */
6059 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6060 arg6,arg7,arg8,arg9,arg10, \
6061 arg11) \
6062 do { \
6063 volatile OrigFn _orig = (orig); \
6064 volatile unsigned long _argvec[12]; \
6065 volatile unsigned long _res; \
6066 _argvec[0] = (unsigned long)_orig.nraddr; \
6067 _argvec[1] = (unsigned long)(arg1); \
6068 _argvec[2] = (unsigned long)(arg2); \
6069 _argvec[3] = (unsigned long)(arg3); \
6070 _argvec[4] = (unsigned long)(arg4); \
6071 _argvec[5] = (unsigned long)(arg5); \
6072 _argvec[6] = (unsigned long)(arg6); \
6073 _argvec[7] = (unsigned long)(arg7); \
6074 _argvec[8] = (unsigned long)(arg8); \
6075 _argvec[9] = (unsigned long)(arg9); \
6076 _argvec[10] = (unsigned long)(arg10); \
6077 _argvec[11] = (unsigned long)(arg11); \
6078 __asm__ volatile( \
6079 "dsubu $29, $29, 24\n\t" \
6080 "ld $4, 72(%1)\n\t" \
6081 "sd $4, 0($29)\n\t" \
6082 "ld $4, 80(%1)\n\t" \
6083 "sd $4, 8($29)\n\t" \
6084 "ld $4, 88(%1)\n\t" \
6085 "sd $4, 16($29)\n\t" \
6086 "ld $4, 8(%1)\n\t" \
6087 "ld $5, 16(%1)\n\t" \
6088 "ld $6, 24(%1)\n\t" \
6089 "ld $7, 32(%1)\n\t" \
6090 "ld $8, 40(%1)\n\t" \
6091 "ld $9, 48(%1)\n\t" \
6092 "ld $10, 56(%1)\n\t" \
6093 "ld $11, 64(%1)\n\t" \
6094 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6095 VALGRIND_CALL_NOREDIR_T9 \
6096 "daddu $29, $29, 24\n\t" \
6097 "move %0, $2\n" \
6098 : /*out*/ "=r" (_res) \
6099 : /*in*/ "r" (&_argvec[0]) \
6100 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6101 ); \
6102 lval = (__typeof__(lval)) _res; \
6103 } while (0)
/* Call a 12-argument function through 'orig' (MIPS n64): args 1-8
   in $4-$11, args 9-12 pushed in a 32-byte stack frame; target in
   $25 (t9), result from $2 (v0). */
6105 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6106 arg6,arg7,arg8,arg9,arg10, \
6107 arg11,arg12) \
6108 do { \
6109 volatile OrigFn _orig = (orig); \
6110 volatile unsigned long _argvec[13]; \
6111 volatile unsigned long _res; \
6112 _argvec[0] = (unsigned long)_orig.nraddr; \
6113 _argvec[1] = (unsigned long)(arg1); \
6114 _argvec[2] = (unsigned long)(arg2); \
6115 _argvec[3] = (unsigned long)(arg3); \
6116 _argvec[4] = (unsigned long)(arg4); \
6117 _argvec[5] = (unsigned long)(arg5); \
6118 _argvec[6] = (unsigned long)(arg6); \
6119 _argvec[7] = (unsigned long)(arg7); \
6120 _argvec[8] = (unsigned long)(arg8); \
6121 _argvec[9] = (unsigned long)(arg9); \
6122 _argvec[10] = (unsigned long)(arg10); \
6123 _argvec[11] = (unsigned long)(arg11); \
6124 _argvec[12] = (unsigned long)(arg12); \
6125 __asm__ volatile( \
6126 "dsubu $29, $29, 32\n\t" \
6127 "ld $4, 72(%1)\n\t" \
6128 "sd $4, 0($29)\n\t" \
6129 "ld $4, 80(%1)\n\t" \
6130 "sd $4, 8($29)\n\t" \
6131 "ld $4, 88(%1)\n\t" \
6132 "sd $4, 16($29)\n\t" \
6133 "ld $4, 96(%1)\n\t" \
6134 "sd $4, 24($29)\n\t" \
6135 "ld $4, 8(%1)\n\t" \
6136 "ld $5, 16(%1)\n\t" \
6137 "ld $6, 24(%1)\n\t" \
6138 "ld $7, 32(%1)\n\t" \
6139 "ld $8, 40(%1)\n\t" \
6140 "ld $9, 48(%1)\n\t" \
6141 "ld $10, 56(%1)\n\t" \
6142 "ld $11, 64(%1)\n\t" \
6143 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6144 VALGRIND_CALL_NOREDIR_T9 \
6145 "daddu $29, $29, 32\n\t" \
6146 "move %0, $2\n" \
6147 : /*out*/ "=r" (_res) \
6148 : /*in*/ "r" (&_argvec[0]) \
6149 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6150 ); \
6151 lval = (__typeof__(lval)) _res; \
6152 } while (0)
6154 #endif /* PLAT_mips64_linux */
6156 /* ------------------------ tilegx-linux ------------------------- */
6158 #if defined(PLAT_tilegx_linux)
6160 /* These regs are trashed by the hidden call. */
/* Clobber list for the tilegx asm blocks below: r0-r29 plus lr
   (the link register, saved/restored explicitly by each macro). */
6161 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3", "r4", "r5", \
6162 "r6", "r7", "r8", "r9", "r10", "r11", "r12", "r13", "r14", \
6163 "r15", "r16", "r17", "r18", "r19", "r20", "r21", "r22", \
6164 "r23", "r24", "r25", "r26", "r27", "r28", "r29", "lr"
6166 /* These CALL_FN_ macros assume that on tilegx-linux, sizeof(unsigned
6167 long) == 8. */
/* Call a 0-argument function through 'orig' (tilegx): lr is saved on
   the stack around the call, the target address is loaded into r12,
   and the result is read back from r0. */
6169 #define CALL_FN_W_v(lval, orig) \
6170 do { \
6171 volatile OrigFn _orig = (orig); \
6172 volatile unsigned long _argvec[1]; \
6173 volatile unsigned long _res; \
6174 _argvec[0] = (unsigned long)_orig.nraddr; \
6175 __asm__ volatile( \
6176 "addi sp, sp, -8 \n\t" \
6177 "st_add sp, lr, -8 \n\t" \
6178 "ld r12, %1 \n\t" /* target->r11 */ \
6179 VALGRIND_CALL_NOREDIR_R12 \
6180 "addi sp, sp, 8\n\t" \
6181 "ld_add lr, sp, 8 \n\t" \
6182 "move %0, r0 \n" \
6183 : /*out*/ "=r" (_res) \
6184 : /*in*/ "r" (&_argvec[0]) \
6185 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6187 lval = (__typeof__(lval)) _res; \
6188 } while (0)
/* Call a 1-argument function through 'orig' (tilegx): lr saved on
   the stack, argvec walked via r29 (target -> r12, arg1 -> r0),
   result from r0. */
6190 #define CALL_FN_W_W(lval, orig, arg1) \
6191 do { \
6192 volatile OrigFn _orig = (orig); \
6193 volatile unsigned long _argvec[2]; \
6194 volatile unsigned long _res; \
6195 _argvec[0] = (unsigned long)_orig.nraddr; \
6196 _argvec[1] = (unsigned long)(arg1); \
6197 __asm__ volatile( \
6198 "addi sp, sp, -8 \n\t" \
6199 "st_add sp, lr, -8 \n\t" \
6200 "move r29, %1 \n\t" \
6201 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6202 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6203 VALGRIND_CALL_NOREDIR_R12 \
6204 "addi sp, sp, 8\n\t" \
6205 "ld_add lr, sp, 8 \n\t" \
6206 "move %0, r0\n" \
6207 : /*out*/ "=r" (_res) \
6208 : /*in*/ "r" (&_argvec[0]) \
6209 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6210 lval = (__typeof__(lval)) _res; \
6211 } while (0)
/* Call a 2-argument function through 'orig' (tilegx): lr saved on
   the stack, target -> r12, args 1-2 -> r0-r1, result from r0. */
6213 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
6214 do { \
6215 volatile OrigFn _orig = (orig); \
6216 volatile unsigned long _argvec[3]; \
6217 volatile unsigned long _res; \
6218 _argvec[0] = (unsigned long)_orig.nraddr; \
6219 _argvec[1] = (unsigned long)(arg1); \
6220 _argvec[2] = (unsigned long)(arg2); \
6221 __asm__ volatile( \
6222 "addi sp, sp, -8 \n\t" \
6223 "st_add sp, lr, -8 \n\t" \
6224 "move r29, %1 \n\t" \
6225 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6226 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6227 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6228 VALGRIND_CALL_NOREDIR_R12 \
6229 "addi sp, sp, 8\n\t" \
6230 "ld_add lr, sp, 8 \n\t" \
6231 "move %0, r0\n" \
6232 : /*out*/ "=r" (_res) \
6233 : /*in*/ "r" (&_argvec[0]) \
6234 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6235 lval = (__typeof__(lval)) _res; \
6236 } while (0)
/* Call a 3-argument function through 'orig' (tilegx): lr saved on
   the stack, target -> r12, args 1-3 -> r0-r2, result from r0. */
6238 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
6239 do { \
6240 volatile OrigFn _orig = (orig); \
6241 volatile unsigned long _argvec[4]; \
6242 volatile unsigned long _res; \
6243 _argvec[0] = (unsigned long)_orig.nraddr; \
6244 _argvec[1] = (unsigned long)(arg1); \
6245 _argvec[2] = (unsigned long)(arg2); \
6246 _argvec[3] = (unsigned long)(arg3); \
6247 __asm__ volatile( \
6248 "addi sp, sp, -8 \n\t" \
6249 "st_add sp, lr, -8 \n\t" \
6250 "move r29, %1 \n\t" \
6251 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6252 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6253 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6254 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6255 VALGRIND_CALL_NOREDIR_R12 \
6256 "addi sp, sp, 8 \n\t" \
6257 "ld_add lr, sp, 8 \n\t" \
6258 "move %0, r0\n" \
6259 : /*out*/ "=r" (_res) \
6260 : /*in*/ "r" (&_argvec[0]) \
6261 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6262 lval = (__typeof__(lval)) _res; \
6263 } while (0)
/* Call a 4-argument function through 'orig' (tilegx): lr saved on
   the stack, target -> r12, args 1-4 -> r0-r3, result from r0. */
6265 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
6266 do { \
6267 volatile OrigFn _orig = (orig); \
6268 volatile unsigned long _argvec[5]; \
6269 volatile unsigned long _res; \
6270 _argvec[0] = (unsigned long)_orig.nraddr; \
6271 _argvec[1] = (unsigned long)(arg1); \
6272 _argvec[2] = (unsigned long)(arg2); \
6273 _argvec[3] = (unsigned long)(arg3); \
6274 _argvec[4] = (unsigned long)(arg4); \
6275 __asm__ volatile( \
6276 "addi sp, sp, -8 \n\t" \
6277 "st_add sp, lr, -8 \n\t" \
6278 "move r29, %1 \n\t" \
6279 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6280 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6281 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6282 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6283 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6284 VALGRIND_CALL_NOREDIR_R12 \
6285 "addi sp, sp, 8\n\t" \
6286 "ld_add lr, sp, 8 \n\t" \
6287 "move %0, r0\n" \
6288 : /*out*/ "=r" (_res) \
6289 : /*in*/ "r" (&_argvec[0]) \
6290 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6291 lval = (__typeof__(lval)) _res; \
6292 } while (0)
/* Call a 5-argument function through 'orig' (tilegx): lr saved on
   the stack, target -> r12, args 1-5 -> r0-r4, result from r0. */
6294 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
6295 do { \
6296 volatile OrigFn _orig = (orig); \
6297 volatile unsigned long _argvec[6]; \
6298 volatile unsigned long _res; \
6299 _argvec[0] = (unsigned long)_orig.nraddr; \
6300 _argvec[1] = (unsigned long)(arg1); \
6301 _argvec[2] = (unsigned long)(arg2); \
6302 _argvec[3] = (unsigned long)(arg3); \
6303 _argvec[4] = (unsigned long)(arg4); \
6304 _argvec[5] = (unsigned long)(arg5); \
6305 __asm__ volatile( \
6306 "addi sp, sp, -8 \n\t" \
6307 "st_add sp, lr, -8 \n\t" \
6308 "move r29, %1 \n\t" \
6309 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6310 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6311 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6312 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6313 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6314 "ld_add r4, r29, 8 \n\t" /*arg5 -> r4 */ \
6315 VALGRIND_CALL_NOREDIR_R12 \
6316 "addi sp, sp, 8\n\t" \
6317 "ld_add lr, sp, 8 \n\t" \
6318 "move %0, r0\n" \
6319 : /*out*/ "=r" (_res) \
6320 : /*in*/ "r" (&_argvec[0]) \
6321 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6322 lval = (__typeof__(lval)) _res; \
6323 } while (0)
/* Call a 6-argument function through 'orig' (tilegx): lr saved on
   the stack, target -> r12, args 1-6 -> r0-r5, result from r0. */
6324 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
6325 do { \
6326 volatile OrigFn _orig = (orig); \
6327 volatile unsigned long _argvec[7]; \
6328 volatile unsigned long _res; \
6329 _argvec[0] = (unsigned long)_orig.nraddr; \
6330 _argvec[1] = (unsigned long)(arg1); \
6331 _argvec[2] = (unsigned long)(arg2); \
6332 _argvec[3] = (unsigned long)(arg3); \
6333 _argvec[4] = (unsigned long)(arg4); \
6334 _argvec[5] = (unsigned long)(arg5); \
6335 _argvec[6] = (unsigned long)(arg6); \
6336 __asm__ volatile( \
6337 "addi sp, sp, -8 \n\t" \
6338 "st_add sp, lr, -8 \n\t" \
6339 "move r29, %1 \n\t" \
6340 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6341 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6342 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6343 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6344 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6345 "ld_add r4, r29, 8 \n\t" /*arg5 -> r4 */ \
6346 "ld_add r5, r29, 8 \n\t" /*arg6 -> r5 */ \
6347 VALGRIND_CALL_NOREDIR_R12 \
6348 "addi sp, sp, 8\n\t" \
6349 "ld_add lr, sp, 8 \n\t" \
6350 "move %0, r0\n" \
6351 : /*out*/ "=r" (_res) \
6352 : /*in*/ "r" (&_argvec[0]) \
6353 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6354 lval = (__typeof__(lval)) _res; \
6355 } while (0)
/* Call a 7-argument function through 'orig' (tilegx): lr saved on
   the stack, target -> r12, args 1-7 -> r0-r6, result from r0. */
6357 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6358 arg7) \
6359 do { \
6360 volatile OrigFn _orig = (orig); \
6361 volatile unsigned long _argvec[8]; \
6362 volatile unsigned long _res; \
6363 _argvec[0] = (unsigned long)_orig.nraddr; \
6364 _argvec[1] = (unsigned long)(arg1); \
6365 _argvec[2] = (unsigned long)(arg2); \
6366 _argvec[3] = (unsigned long)(arg3); \
6367 _argvec[4] = (unsigned long)(arg4); \
6368 _argvec[5] = (unsigned long)(arg5); \
6369 _argvec[6] = (unsigned long)(arg6); \
6370 _argvec[7] = (unsigned long)(arg7); \
6371 __asm__ volatile( \
6372 "addi sp, sp, -8 \n\t" \
6373 "st_add sp, lr, -8 \n\t" \
6374 "move r29, %1 \n\t" \
6375 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6376 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6377 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6378 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6379 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6380 "ld_add r4, r29, 8 \n\t" /*arg5 -> r4 */ \
6381 "ld_add r5, r29, 8 \n\t" /*arg6 -> r5 */ \
6382 "ld_add r6, r29, 8 \n\t" /*arg7 -> r6 */ \
6383 VALGRIND_CALL_NOREDIR_R12 \
6384 "addi sp, sp, 8\n\t" \
6385 "ld_add lr, sp, 8 \n\t" \
6386 "move %0, r0\n" \
6387 : /*out*/ "=r" (_res) \
6388 : /*in*/ "r" (&_argvec[0]) \
6389 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6390 lval = (__typeof__(lval)) _res; \
6391 } while (0)
/* Call an 8-argument function through 'orig' (tilegx): lr saved on
   the stack, target -> r12, args 1-8 -> r0-r7, result from r0. */
6393 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6394 arg7,arg8) \
6395 do { \
6396 volatile OrigFn _orig = (orig); \
6397 volatile unsigned long _argvec[9]; \
6398 volatile unsigned long _res; \
6399 _argvec[0] = (unsigned long)_orig.nraddr; \
6400 _argvec[1] = (unsigned long)(arg1); \
6401 _argvec[2] = (unsigned long)(arg2); \
6402 _argvec[3] = (unsigned long)(arg3); \
6403 _argvec[4] = (unsigned long)(arg4); \
6404 _argvec[5] = (unsigned long)(arg5); \
6405 _argvec[6] = (unsigned long)(arg6); \
6406 _argvec[7] = (unsigned long)(arg7); \
6407 _argvec[8] = (unsigned long)(arg8); \
6408 __asm__ volatile( \
6409 "addi sp, sp, -8 \n\t" \
6410 "st_add sp, lr, -8 \n\t" \
6411 "move r29, %1 \n\t" \
6412 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6413 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6414 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6415 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6416 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6417 "ld_add r4, r29, 8 \n\t" /*arg5 -> r4 */ \
6418 "ld_add r5, r29, 8 \n\t" /*arg6 -> r5 */ \
6419 "ld_add r6, r29, 8 \n\t" /*arg7 -> r6 */ \
6420 "ld_add r7, r29, 8 \n\t" /*arg8 -> r7 */ \
6421 VALGRIND_CALL_NOREDIR_R12 \
6422 "addi sp, sp, 8\n\t" \
6423 "ld_add lr, sp, 8 \n\t" \
6424 "move %0, r0\n" \
6425 : /*out*/ "=r" (_res) \
6426 : /*in*/ "r" (&_argvec[0]) \
6427 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6428 lval = (__typeof__(lval)) _res; \
6429 } while (0)
/* Call a 9-argument function through 'orig' (tilegx): lr saved on
   the stack, target -> r12, args 1-9 -> r0-r8, result from r0. */
6431 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6432 arg7,arg8,arg9) \
6433 do { \
6434 volatile OrigFn _orig = (orig); \
6435 volatile unsigned long _argvec[10]; \
6436 volatile unsigned long _res; \
6437 _argvec[0] = (unsigned long)_orig.nraddr; \
6438 _argvec[1] = (unsigned long)(arg1); \
6439 _argvec[2] = (unsigned long)(arg2); \
6440 _argvec[3] = (unsigned long)(arg3); \
6441 _argvec[4] = (unsigned long)(arg4); \
6442 _argvec[5] = (unsigned long)(arg5); \
6443 _argvec[6] = (unsigned long)(arg6); \
6444 _argvec[7] = (unsigned long)(arg7); \
6445 _argvec[8] = (unsigned long)(arg8); \
6446 _argvec[9] = (unsigned long)(arg9); \
6447 __asm__ volatile( \
6448 "addi sp, sp, -8 \n\t" \
6449 "st_add sp, lr, -8 \n\t" \
6450 "move r29, %1 \n\t" \
6451 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6452 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6453 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6454 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6455 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6456 "ld_add r4, r29, 8 \n\t" /*arg5 -> r4 */ \
6457 "ld_add r5, r29, 8 \n\t" /*arg6 -> r5 */ \
6458 "ld_add r6, r29, 8 \n\t" /*arg7 -> r6 */ \
6459 "ld_add r7, r29, 8 \n\t" /*arg8 -> r7 */ \
6460 "ld_add r8, r29, 8 \n\t" /*arg9 -> r8 */ \
6461 VALGRIND_CALL_NOREDIR_R12 \
6462 "addi sp, sp, 8\n\t" \
6463 "ld_add lr, sp, 8 \n\t" \
6464 "move %0, r0\n" \
6465 : /*out*/ "=r" (_res) \
6466 : /*in*/ "r" (&_argvec[0]) \
6467 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6468 lval = (__typeof__(lval)) _res; \
6469 } while (0)
/* Call a 10-argument function through 'orig' (tilegx): lr saved on
   the stack, target -> r12, args 1-10 -> r0-r9, result from r0. */
6471 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6472 arg7,arg8,arg9,arg10) \
6473 do { \
6474 volatile OrigFn _orig = (orig); \
6475 volatile unsigned long _argvec[11]; \
6476 volatile unsigned long _res; \
6477 _argvec[0] = (unsigned long)_orig.nraddr; \
6478 _argvec[1] = (unsigned long)(arg1); \
6479 _argvec[2] = (unsigned long)(arg2); \
6480 _argvec[3] = (unsigned long)(arg3); \
6481 _argvec[4] = (unsigned long)(arg4); \
6482 _argvec[5] = (unsigned long)(arg5); \
6483 _argvec[6] = (unsigned long)(arg6); \
6484 _argvec[7] = (unsigned long)(arg7); \
6485 _argvec[8] = (unsigned long)(arg8); \
6486 _argvec[9] = (unsigned long)(arg9); \
6487 _argvec[10] = (unsigned long)(arg10); \
6488 __asm__ volatile( \
6489 "addi sp, sp, -8 \n\t" \
6490 "st_add sp, lr, -8 \n\t" \
6491 "move r29, %1 \n\t" \
6492 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6493 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6494 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6495 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6496 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6497 "ld_add r4, r29, 8 \n\t" /*arg5 -> r4 */ \
6498 "ld_add r5, r29, 8 \n\t" /*arg6 -> r5 */ \
6499 "ld_add r6, r29, 8 \n\t" /*arg7 -> r6 */ \
6500 "ld_add r7, r29, 8 \n\t" /*arg8 -> r7 */ \
6501 "ld_add r8, r29, 8 \n\t" /*arg9 -> r8 */ \
6502 "ld_add r9, r29, 8 \n\t" /*arg10 -> r9 */ \
6503 VALGRIND_CALL_NOREDIR_R12 \
6504 "addi sp, sp, 8\n\t" \
6505 "ld_add lr, sp, 8 \n\t" \
6506 "move %0, r0\n" \
6507 : /*out*/ "=r" (_res) \
6508 : /*in*/ "r" (&_argvec[0]) \
6509 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6510 lval = (__typeof__(lval)) _res; \
6511 } while (0)
/* Call an 11-argument function through 'orig' (tilegx): lr saved on
   the stack, target -> r12, args 1-10 -> r0-r9, arg11 loaded into
   r10 and pushed on the stack; result from r0.
   NOTE(review): the sp adjustments (-8, -8, -16, then +24 and the
   final ld_add +8) net out to zero, but presumably the exact layout
   matches the tilegx outgoing-argument convention — verify against
   the TILE-Gx ABI if this path is exercised. */
6513 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6514 arg6,arg7,arg8,arg9,arg10, \
6515 arg11) \
6516 do { \
6517 volatile OrigFn _orig = (orig); \
6518 volatile unsigned long _argvec[12]; \
6519 volatile unsigned long _res; \
6520 _argvec[0] = (unsigned long)_orig.nraddr; \
6521 _argvec[1] = (unsigned long)(arg1); \
6522 _argvec[2] = (unsigned long)(arg2); \
6523 _argvec[3] = (unsigned long)(arg3); \
6524 _argvec[4] = (unsigned long)(arg4); \
6525 _argvec[5] = (unsigned long)(arg5); \
6526 _argvec[6] = (unsigned long)(arg6); \
6527 _argvec[7] = (unsigned long)(arg7); \
6528 _argvec[8] = (unsigned long)(arg8); \
6529 _argvec[9] = (unsigned long)(arg9); \
6530 _argvec[10] = (unsigned long)(arg10); \
6531 _argvec[11] = (unsigned long)(arg11); \
6532 __asm__ volatile( \
6533 "addi sp, sp, -8 \n\t" \
6534 "st_add sp, lr, -8 \n\t" \
6535 "move r29, %1 \n\t" \
6536 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6537 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6538 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6539 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6540 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6541 "ld_add r4, r29, 8 \n\t" /*arg5 -> r4 */ \
6542 "ld_add r5, r29, 8 \n\t" /*arg6 -> r5 */ \
6543 "ld_add r6, r29, 8 \n\t" /*arg7 -> r6 */ \
6544 "ld_add r7, r29, 8 \n\t" /*arg8 -> r7 */ \
6545 "ld_add r8, r29, 8 \n\t" /*arg9 -> r8 */ \
6546 "ld_add r9, r29, 8 \n\t" /*arg10 -> r9 */ \
6547 "ld r10, r29 \n\t" \
6548 "st_add sp, r10, -16 \n\t" \
6549 VALGRIND_CALL_NOREDIR_R12 \
6550 "addi sp, sp, 24 \n\t" \
6551 "ld_add lr, sp, 8 \n\t" \
6552 "move %0, r0\n" \
6553 : /*out*/ "=r" (_res) \
6554 : /*in*/ "r" (&_argvec[0]) \
6555 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6556 lval = (__typeof__(lval)) _res; \
6557 } while (0)
/* Call a 12-argument function via the non-redirected entry point.
   Arguments 1..10 go in registers r0..r9; arguments 11 and 12 are
   stored to the stack through r28, per the tilegx calling
   convention.  lr is saved before and restored after the call. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,  \
                      arg6,arg7,arg8,arg9,arg10,             \
                      arg11,arg12)                           \
   do {                                                      \
      volatile OrigFn        _orig = (orig);                 \
      /* Slot 0: target address; slots 1..12: arguments. */  \
      volatile unsigned long _argvec[13];                    \
      volatile unsigned long _res;                           \
      _argvec[0] = (unsigned long)_orig.nraddr;              \
      _argvec[1] = (unsigned long)(arg1);                    \
      _argvec[2] = (unsigned long)(arg2);                    \
      _argvec[3] = (unsigned long)(arg3);                    \
      _argvec[4] = (unsigned long)(arg4);                    \
      _argvec[5] = (unsigned long)(arg5);                    \
      _argvec[6] = (unsigned long)(arg6);                    \
      _argvec[7] = (unsigned long)(arg7);                    \
      _argvec[8] = (unsigned long)(arg8);                    \
      _argvec[9] = (unsigned long)(arg9);                    \
      _argvec[10] = (unsigned long)(arg10);                  \
      _argvec[11] = (unsigned long)(arg11);                  \
      _argvec[12] = (unsigned long)(arg12);                  \
      __asm__ volatile(                                      \
         "addi sp, sp, -8 \n\t"                              \
         "st_add sp, lr, -8 \n\t"   /* save lr */            \
         "move r29, %1 \n\t"        /* r29 = &_argvec[0] */  \
         "ld_add r12, r29, 8 \n\t"  /* target -> r12 */      \
         "ld_add r0, r29, 8 \n\t"   /* arg1  -> r0 */        \
         "ld_add r1, r29, 8 \n\t"   /* arg2  -> r1 */        \
         "ld_add r2, r29, 8 \n\t"   /* arg3  -> r2 */        \
         "ld_add r3, r29, 8 \n\t"   /* arg4  -> r3 */        \
         "ld_add r4, r29, 8 \n\t"   /* arg5  -> r4 */        \
         "ld_add r5, r29, 8 \n\t"   /* arg6  -> r5 */        \
         "ld_add r6, r29, 8 \n\t"   /* arg7  -> r6 */        \
         "ld_add r7, r29, 8 \n\t"   /* arg8  -> r7 */        \
         "ld_add r8, r29, 8 \n\t"   /* arg9  -> r8 */        \
         "ld_add r9, r29, 8 \n\t"   /* arg10 -> r9 */        \
         "addi r28, sp, -8 \n\t"    /* r28 -> outgoing area */\
         "addi sp, sp, -24 \n\t"    /* make stack room */    \
         "ld_add r10, r29, 8 \n\t"  /* arg11 -> r10 */       \
         "ld r11, r29 \n\t"         /* arg12 -> r11 */       \
         "st_add r28, r10, 8 \n\t"  /* arg11 -> stack */     \
         "st r28, r11 \n\t"         /* arg12 -> stack */     \
         VALGRIND_CALL_NOREDIR_R12                           \
         "addi sp, sp, 32 \n\t"     /* unwind stack room */  \
         "ld_add lr, sp, 8 \n\t"    /* restore lr */         \
         "move %0, r0\n"            /* result -> _res */     \
         : /*out*/   "=r" (_res)                             \
         : /*in*/    "r" (&_argvec[0])                       \
         : /*trash*/ "memory", __CALLER_SAVED_REGS);         \
      lval = (__typeof__(lval)) _res;                        \
   } while (0)
6609 #endif /* PLAT_tilegx_linux */
6611 /* ------------------------------------------------------------------ */
6612 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6613 /* */
6614 /* ------------------------------------------------------------------ */
/* Some request codes.  There are many more of these, but most are not
   exposed to end-user view.  These are the public ones, all of the
   form 0x1000 + small_number.

   Core ones are in the range 0x00000000--0x0000ffff.  The non-public
   ones start at 0x2000.
*/
/* Tool-visible request helpers: build a tool-specific client-request
   base code from two identifying characters, and test whether a
   request value belongs to that tool.  These macros are used by
   tools -- they must be public, but don't embed them into other
   programs. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)((((a) & 0xff) << 24) | (((b) & 0xff) << 16)))
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (((v) & 0xffff0000) == VG_USERREQ_TOOL_BASE(a,b))
/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
   This enum comprises an ABI exported by Valgrind to programs
   which use client requests.  DO NOT CHANGE THE ORDER OF THESE
   ENTRIES, NOR DELETE ANY -- add new ones at the end. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a
             monitor command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK    = 0x1301,
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK      = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL      = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL     = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC       = 0x1305,
          VG_USERREQ__MEMPOOL_FREE        = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM        = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL        = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE      = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS      = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901
   } Vg_ClientRequest;
/* __extension__ is a GCC keyword (used to suppress pedantic
   warnings); make it expand to nothing on non-GCC compilers. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
/* Returns the number of Valgrinds this code is running under.  That
   is, 0 if running natively, 1 if running under Valgrind, 2 if
   running under Valgrind which is running under another Valgrind,
   etc. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)
/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
   _qzz_len - 1].  Useful if you are debugging a JITter or some such,
   since it provides a way to make sure valgrind will retranslate the
   invalidated area.  Returns no value. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS,  \
                                    _qzz_addr, _qzz_len, 0, 0, 0)
6732 /* These requests are for getting Valgrind itself to print something.
6733 Possibly with a backtrace. This is a really ugly hack. The return value
6734 is the number of characters printed, excluding the "**<pid>** " part at the
6735 start and the backtrace (if present). */
6737 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6738 /* Modern GCC will optimize the static routine out if unused,
6739 and unused attribute will shut down warnings about it. */
6740 static int VALGRIND_PRINTF(const char *format, ...)
6741 __attribute__((format(__printf__, 1, 2), __unused__));
6742 #endif
6743 static int
6744 #if defined(_MSC_VER)
6745 __inline
6746 #endif
6747 VALGRIND_PRINTF(const char *format, ...)
6749 #if defined(NVALGRIND)
6750 return 0;
6751 #else /* NVALGRIND */
6752 #if defined(_MSC_VER) || defined(__MINGW64__)
6753 uintptr_t _qzz_res;
6754 #else
6755 unsigned long _qzz_res;
6756 #endif
6757 va_list vargs;
6758 va_start(vargs, format);
6759 #if defined(_MSC_VER) || defined(__MINGW64__)
6760 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6761 VG_USERREQ__PRINTF_VALIST_BY_REF,
6762 (uintptr_t)format,
6763 (uintptr_t)&vargs,
6764 0, 0, 0);
6765 #else
6766 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6767 VG_USERREQ__PRINTF_VALIST_BY_REF,
6768 (unsigned long)format,
6769 (unsigned long)&vargs,
6770 0, 0, 0);
6771 #endif
6772 va_end(vargs);
6773 return (int)_qzz_res;
6774 #endif /* NVALGRIND */
6777 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6778 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6779 __attribute__((format(__printf__, 1, 2), __unused__));
6780 #endif
6781 static int
6782 #if defined(_MSC_VER)
6783 __inline
6784 #endif
6785 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6787 #if defined(NVALGRIND)
6788 return 0;
6789 #else /* NVALGRIND */
6790 #if defined(_MSC_VER) || defined(__MINGW64__)
6791 uintptr_t _qzz_res;
6792 #else
6793 unsigned long _qzz_res;
6794 #endif
6795 va_list vargs;
6796 va_start(vargs, format);
6797 #if defined(_MSC_VER) || defined(__MINGW64__)
6798 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6799 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6800 (uintptr_t)format,
6801 (uintptr_t)&vargs,
6802 0, 0, 0);
6803 #else
6804 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6805 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6806 (unsigned long)format,
6807 (unsigned long)&vargs,
6808 0, 0, 0);
6809 #endif
6810 va_end(vargs);
6811 return (int)_qzz_res;
6812 #endif /* NVALGRIND */
6816 /* These requests allow control to move from the simulated CPU to the
6817 real CPU, calling an arbitary function.
6819 Note that the current ThreadId is inserted as the first argument.
6820 So this call:
6822 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
6824 requires f to have this signature:
6826 Word f(Word tid, Word arg1, Word arg2)
6828 where "Word" is a word-sized type.
6830 Note that these client requests are not entirely reliable. For example,
6831 if you call a function with them that subsequently calls printf(),
6832 there's a high chance Valgrind will crash. Generally, your prospects of
6833 these working are made higher if the called function does not refer to
6834 any global variables, and does not refer to any libc or other functions
6835 (printf et al). Any kind of entanglement with libc or dynamic linking is
6836 likely to have a bad outcome, for tricky reasons which we've grappled
   with a lot in the past.
*/
/* Run _qyy_fn on the real CPU (see the comment above).  The running
   thread's ThreadId is prepended as the first argument, so CALL0
   needs a 1-argument function, CALL1 a 2-argument one, etc. */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL3,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2,         \
                                    _qyy_arg3, 0)
/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted. */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
6874 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
6875 when heap blocks are allocated in order to give accurate results. This
6876 happens automatically for the standard allocator functions such as
6877 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
6878 delete[], etc.
6880 But if your program uses a custom allocator, this doesn't automatically
6881 happen, and Valgrind will not do as well. For example, if you allocate
6882 superblocks with mmap() and then allocates chunks of the superblocks, all
6883 Valgrind's observations will be at the mmap() level and it won't know that
6884 the chunks should be considered separate entities. In Memcheck's case,
6885 that means you probably won't get heap block overrun detection (because
6886 there won't be redzones marked as unaddressable) and you definitely won't
6887 get any leak detection.
6889 The following client requests allow a custom allocator to be annotated so
6890 that it can be handled accurately by Valgrind.
6892 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
6893 by a malloc()-like function. For Memcheck (an illustrative case), this
6894 does two things:
6896 - It records that the block has been allocated. This means any addresses
6897 within the block mentioned in error messages will be
6898 identified as belonging to the block. It also means that if the block
6899 isn't freed it will be detected by the leak checker.
6901 - It marks the block as being addressable and undefined (if 'is_zeroed' is
6902 not set), or addressable and defined (if 'is_zeroed' is set). This
6903 controls how accesses to the block by the program are handled.
6905 'addr' is the start of the usable block (ie. after any
6906 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
6907 can apply redzones -- these are blocks of padding at the start and end of
6908 each block. Adding redzones is recommended as it makes it much more likely
6909 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
6910 zeroed (or filled with another predictable value), as is the case for
6911 calloc().
6913 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
6914 heap block -- that will be used by the client program -- is allocated.
6915 It's best to put it at the outermost level of the allocator if possible;
6916 for example, if you have a function my_alloc() which calls
6917 internal_alloc(), and the client request is put inside internal_alloc(),
6918 stack traces relating to the heap block will contain entries for both
6919 my_alloc() and internal_alloc(), which is probably not what you want.
6921 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
6922 custom blocks from within a heap block, B, that has been allocated with
6923 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
6924 -- the custom blocks will take precedence.
6926 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
6927 Memcheck, it does two things:
6929 - It records that the block has been deallocated. This assumes that the
6930 block was annotated as having been allocated via
6931 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6933 - It marks the block as being unaddressable.
6935 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
6936 heap block is deallocated.
6938 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
6939 Memcheck, it does four things:
6941 - It records that the size of a block has been changed. This assumes that
6942 the block was annotated as having been allocated via
6943 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6945 - If the block shrunk, it marks the freed memory as being unaddressable.
6947 - If the block grew, it marks the new area as undefined and defines a red
6948 zone past the end of the new block.
6950 - The V-bits of the overlap between the old and the new block are preserved.
6952 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
6953 and before deallocation of the old block.
6955 In many cases, these three client requests will not be enough to get your
6956 allocator working well with Memcheck. More specifically, if your allocator
6957 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
6958 will be necessary to mark the memory as addressable just before the zeroing
6959 occurs, otherwise you'll get a lot of invalid write errors. For example,
6960 you'll need to do this if your allocator recycles freed blocks, but it
6961 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
6962 Alternatively, if your allocator reuses freed blocks for allocator-internal
6963 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
6965 Really, what's happening is a blurring of the lines between the client
6966 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
6967 memory should be considered unaddressable to the client program, but the
6968 allocator knows more than the rest of the client program and so may be able
6969 to safely access it. Extra client requests are necessary for Valgrind to
6970 understand the distinction between the allocator and the rest of the
6971 program.
   Ignored if addr == 0.
*/
/* Mark [addr, addr+sizeB) as a heap block handed out by a custom
   allocator; see the long comment above for full semantics. */
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)          \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK,       \
                                    addr, sizeB, rzB, is_zeroed, 0)
6979 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
/* Notify the tool that the block at addr changed size in place; see
   the comment for VALGRIND_MALLOCLIKE_BLOCK for details. */
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,    \
                                    addr, oldSizeB, newSizeB, rzB, 0)
6986 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
/* Partner of VALGRIND_MALLOCLIKE_BLOCK: mark the block at addr as
   deallocated; see its comment for details. */
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,         \
                                    addr, rzB, 0, 0, 0)
/* Create a memory pool anchored at 'pool'; rzB is the redzone size,
   is_zeroed says whether chunks are handed out zero-filled. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,   \
                                    pool, rzB, is_zeroed, 0, 0)

/* Destroy a memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,  \
                                    pool, 0, 0, 0, 0)

/* Associate a piece of memory with a memory pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,    \
                                    pool, addr, size, 0, 0)

/* Disassociate a piece of memory from a memory pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,     \
                                    pool, addr, 0, 0, 0)

/* Disassociate any pieces outside a particular range. */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,     \
                                    pool, addr, size, 0, 0)

/* Tell the tool the pool anchored at poolA has moved to poolB.
   NOTE(review): the previous comment here duplicated
   MEMPOOL_CHANGE's "resize and/or move a piece" text; per the
   Valgrind manual this request moves the whole pool anchor --
   confirm against the manual. */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,     \
                                    poolA, poolB, 0, 0, 0)

/* Resize and/or move a piece associated with a memory pool. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,   \
                                    pool, addrA, addrB, size, 0)

/* Return 1 if a mempool exists, else 0. */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                                    VG_USERREQ__MEMPOOL_EXISTS,   \
                                    pool, 0, 0, 0, 0)
/* Mark a piece of memory as being a stack.  Returns a stack id.
   start is the lowest addressable stack byte, end is the highest
   addressable stack byte. */
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                                    VG_USERREQ__STACK_REGISTER,   \
                                    start, end, 0, 0, 0)

/* Unmark the piece of memory associated with a stack id as being a
   stack. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
                                    id, 0, 0, 0, 0)

/* Change the start and end address of the stack id.
   start is the new lowest addressable stack byte, end is the new
   highest addressable stack byte. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,     \
                                    id, start, end, 0, 0)
/* Load PDB debug info for Wine PE image_map. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                    fd, ptr, total_size, delta, 0)
/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                                    VG_USERREQ__MAP_IP_TO_SRCLOC, \
                                    addr, buf64, 0, 0, 0)
/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled. */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                   1, 0, 0, 0, 0)

/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING. */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                   -1, 0, 0, 0, 0)
/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise. */
#define VALGRIND_MONITOR_COMMAND(command)                               \
   VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
                                   command, 0, 0, 0, 0)
/* Tidy up: these per-platform selectors were only needed while the
   platform-specific call-wrapping macros above were being defined. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_tilegx_linux
7112 #endif /* __VALGRIND_H */