/* SPDX-License-Identifier: GPL-2.0+ AND bzip2-1.0.6 */
/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2000-2017 Julian Seward.  All rights reserved.
   Copyright (C) 2021 Sean Anderson <seanga2@gmail.com>
*/

/* This file is for inclusion into client (your!) code.

   You can use these macros to manipulate and query Valgrind's
   execution inside your own programs.

   The resulting executables will still run without Valgrind, just a
   little bit more slowly than they otherwise would, but otherwise
   unchanged.  When not running on valgrind, each client request
   consumes very few (eg. 7) instructions, so the resulting performance
   loss is negligible unless you plan to execute client requests
   millions of times per second.  Nevertheless, if that is still a
   problem, you can compile with the NVALGRIND symbol defined (gcc
   -DNVALGRIND) so that client requests are not even compiled in. */
23
#ifndef __VALGRIND_H
#define __VALGRIND_H


/* ------------------------------------------------------------------ */
/* VERSION NUMBER OF VALGRIND                                         */
/* ------------------------------------------------------------------ */

/* Specify Valgrind's version number, so that user code can
   conditionally compile based on our version number.  Note that these
   were introduced at version 3.6 and so do not exist in version 3.5
   or earlier.  The recommended way to use them to check for "version
   X.Y or later" is (eg)

#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__)   \
    && (__VALGRIND_MAJOR__ > 3                                   \
        || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    16


#include <stdarg.h>

/* Nb: this file might be included in a file compiled with -ansi.  So
   we can't use C++ style "//" comments nor the "asm" keyword (instead
   use "__asm__"). */

/* Derive some tags indicating what the target platform is.  Note
   that in this file we're using the compiler's CPP symbols for
   identifying architectures, which are different to the ones we use
   within the rest of Valgrind.  Note, __powerpc__ is active for both
   32 and 64-bit PPC, whereas __powerpc64__ is only active for the
   latter (on Linux, that is).

   Misc note: how to find out what's predefined in gcc by default:
   gcc -Wp,-dM somefile.c
*/
/* Start from a clean slate: the detection chain below defines at most
   one of these PLAT_* tags. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_nanomips_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris
79
80
/* Map the compiler's predefined CPP symbols onto exactly one PLAT_* tag. */
#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif (defined(__MINGW32__) && defined(__i386__)) \
      || defined(__CYGWIN32__) \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif (defined(__MINGW32__) && defined(__x86_64__)) \
      || (defined(_WIN32) && defined(_M_X64))
/* __MINGW32__ and _WIN32 are defined in 64 bit mode as well. */
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
/* Big Endian uses ELF version 1 */
#  define PLAT_ppc64be_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
/* Little Endian uses ELF version 2 */
#  define PLAT_ppc64le_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==32)
#  define PLAT_mips32_linux 1
#elif defined(__linux__) && defined(__nanomips__)
#  define PLAT_nanomips_linux 1
#elif defined(__sun) && defined(__i386__)
#  define PLAT_x86_solaris 1
#elif defined(__sun) && defined(__x86_64__)
#  define PLAT_amd64_solaris 1
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms.  Building with valgrind enabled on an unknown
   platform is a hard error rather than a silent no-op. */
#  if IS_ENABLED(CONFIG_VALGRIND)
#    error "Unsupported platform for valgrind"
#  endif
#endif
128
129
/* ------------------------------------------------------------------ */
/* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS.  There is nothing */
/* in here of use to end-users -- skip to the next section.           */
/* ------------------------------------------------------------------ */

/*
 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
 * request.  Accepts both pointers and integers as arguments.
 *
 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
 * client request that does not return a value.
 *
 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
 * client request and whose value equals the client request result.  Accepts
 * both pointers and integers as arguments.  Note that such calls are not
 * necessarily pure functions -- they may have side effects.
 */

/* Statement form: assigns the request result (or _zzq_default when not
   running under Valgrind) to _zzq_rlval.  Delegates to the EXPR form,
   which each platform section below defines. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* Fire-and-forget form: the request result is explicitly discarded. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                           _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
160
#if !IS_ENABLED(CONFIG_VALGRIND)

/* Define NVALGRIND to completely remove the Valgrind magic sequence
   from the compiled code (analogous to NDEBUG's effects on
   assert()).  With valgrind support compiled out, a client request
   collapses to its default value and emits no code at all. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)

#else  /* IS_ENABLED(CONFIG_VALGRIND) */

/* The following defines the magic code sequences which the JITter
   spots and handles magically.  Don't look too closely at them as
   they will rot your brain.

   The assembly code sequences for all architectures is in this one
   file.  This is because this file must be stand-alone, and we don't
   want to have multiple files.

   For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
   value gets put in the return slot, so that everything works when
   this is executed not under Valgrind.  Args are passed in a memory
   block, and so there's no intrinsic limit to the number that could
   be passed, but it's currently five.

   The macro args are:
      _zzq_rlval    result lvalue
      _zzq_default  default value (result returned when running on real CPU)
      _zzq_request  request code
      _zzq_arg1..5  request params

   The other two macros are used to support function wrapping, and are
   a lot simpler.  VALGRIND_GET_NR_CONTEXT returns the value of the
   guest's NRADDR pseudo-register and whatever other information is
   needed to safely run the call original from the wrapper: on
   ppc64-linux, the R2 value at the divert point is also needed.  This
   information is abstracted into a user-visible type, OrigFn.

   VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
   guest, but guarantees that the branch instruction will not be
   redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
   branch-and-link-to-r11.  VALGRIND_CALL_NOREDIR is just text, not a
   complete inline asm, since it needs to be combined with more magic
   inline asm stuff to be useful.
*/
207
/* ----------------- x86-{linux,darwin,solaris} ---------------- */

#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  (defined(PLAT_x86_win32) && defined(__GNUC__))     \
    ||  defined(PLAT_x86_solaris)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of %edi totalling 3+13+29+19 = 64 bits -- a no-op on a
   real CPU, but the exact sequence Valgrind's JIT recognises as the
   start of a client request.  Do not alter or reorder. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3,  %%edi ; roll $13, %%edi\n\t"      \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

/* Args go in a 6-word memory block addressed by %eax; the result (or
   the passed-in default, when not under Valgrind) comes back in %edx. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment only (not a complete inline asm): a call through %eax
   that Valgrind guarantees not to redirect. */
#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgl %%edi,%%edi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
          || PLAT_x86_solaris */
274
/* ------------------------- x86-Win32 ------------------------- */

#if defined(PLAT_x86_win32) && !defined(__GNUC__)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* Same rol-edi-by-3/13/29/19 magic sequence as the gcc x86 variant,
   expressed in MSVC inline-assembler syntax. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     __asm rol edi, 3  __asm rol edi, 13          \
                     __asm rol edi, 29 __asm rol edi, 19

/* MSVC's __asm blocks cannot appear in expressions, so the EXPR form
   delegates to a real helper function below.  All args are widened to
   uintptr_t so both pointers and integers are accepted. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),    \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),        \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),           \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    /* eax = &args, edx = default; under Valgrind the xchg returns the
       request result in edx, otherwise edx keeps the default. */
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE                        \
            /* %EAX = guest_NRADDR */                             \
            __asm xchg ecx,ecx                                    \
            __asm mov __addr, eax                                 \
    }                                                             \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Not implementable with MSVC inline asm here; expands to ERROR so any
   use fails to compile rather than silently misbehaving. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE                        \
            __asm xchg edi,edi                                    \
    }                                                             \
 } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */
347
/* ----------------- amd64-{linux,darwin,solaris} --------------- */

#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin) \
    ||  defined(PLAT_amd64_solaris) \
    ||  (defined(PLAT_amd64_win64) && defined(__GNUC__))

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of %rdi totalling 3+13+61+51 = 128 = 2*64 bits -- a
   no-op on a real CPU, but the exact sequence Valgrind's JIT
   recognises as the start of a client request.  Do not alter. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3,  %%rdi ; rolq $13, %%rdi\n\t"      \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

/* Args go in a 6-qword memory block addressed by %rax; the result (or
   the passed-in default, when not under Valgrind) comes back in %rdx. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long int _zzq_args[6];                   \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RDX = client_request ( %RAX ) */         \
                     "xchgq %%rbx,%%rbx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
    })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RAX = guest_NRADDR */                    \
                     "xchgq %%rcx,%%rcx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment only (not a complete inline asm): a call through %rax
   that Valgrind guarantees not to redirect. */
#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgq %%rdi,%%rdi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris
          || (PLAT_amd64_win64 && __GNUC__) */

/* ------------------------- amd64-Win64 ------------------------- */

#if defined(PLAT_amd64_win64) && !defined(__GNUC__)

#error Unsupported compiler.

#endif /* PLAT_amd64_win64 */
421
/* ------------------------ ppc32-linux ------------------------ */

#if defined(PLAT_ppc32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four full-word rotates of r0 totalling 3+13+29+19 = 64 bits -- a
   no-op on a real CPU, but the exact sequence Valgrind's JIT
   recognises as the start of a client request.  Do not alter. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rlwinm 0,0,3,0,31  ; rlwinm 0,0,13,0,31\n\t" \
                     "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

/* Default goes in r3, args pointer in r4; the result (or the default,
   when not under Valgrind) comes back in r3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({         unsigned int  _zzq_args[6];                          \
             unsigned int  _zzq_result;                           \
             unsigned int* _zzq_ptr;                              \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment only: a branch-and-link through r11 that Valgrind
   guarantees not to redirect. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc32_linux */
490
/* ------------------------ ppc64-linux ------------------------ */

#if defined(PLAT_ppc64be_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

/* Four doubleword rotates of r0 totalling 3+13+61+51 = 128 = 2*64
   bits -- a no-op on a real CPU, but the exact sequence Valgrind's
   JIT recognises as the start of a client request.  Do not alter. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3  ; rotldi 0,0,13\n\t"          \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

/* Default goes in r3, args pointer in r4; the result (or the default,
   when not under Valgrind) comes back in r3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({         unsigned long int  _zzq_args[6];                     \
             unsigned long int  _zzq_result;                      \
             unsigned long int* _zzq_ptr;                         \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

/* Fetch the guest's NRADDR pseudo-register, and also the R2 (TOC
   pointer) value at the divert point, which a ppc64 wrapper needs to
   safely call the original function. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* Asm fragment only: a branch-and-link through r11 that Valgrind
   guarantees not to redirect. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64be_linux */
569
#if defined(PLAT_ppc64le_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

/* Four doubleword rotates of r0 totalling 3+13+61+51 = 128 = 2*64
   bits -- a no-op on a real CPU, but the exact sequence Valgrind's
   JIT recognises as the start of a client request.  Do not alter. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3  ; rotldi 0,0,13\n\t"          \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

/* Default goes in r3, args pointer in r4; the result (or the default,
   when not under Valgrind) comes back in r3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({         unsigned long int  _zzq_args[6];                     \
             unsigned long int  _zzq_result;                      \
             unsigned long int* _zzq_ptr;                         \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

/* Fetch the guest's NRADDR pseudo-register, and also the R2 (TOC
   pointer) value at the divert point, which a ppc64 wrapper needs to
   safely call the original function. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* Asm fragment only: ELFv2 ABI uses r12 (not r11) as the function
   entry register; Valgrind guarantees not to redirect this branch. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R12 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64le_linux */
646
/* ------------------------- arm-linux ------------------------- */

#if defined(PLAT_arm_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of r12 totalling 3+13+29+19 = 64 bits -- a no-op on a
   real CPU, but the exact sequence Valgrind's JIT recognises as the
   start of a client request.  Do not alter. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3  ; mov r12, r12, ror #13 \n\t"  \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

/* Default goes in r3, args pointer in r4; the result (or the default,
   when not under Valgrind) comes back in r3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int  _zzq_args[6];                          \
    volatile unsigned int  _zzq_result;                           \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
  })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = guest_NRADDR */                      \
                     "orr r11, r11, r11\n\t"                      \
                     "mov %0, r3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment only: a branch-and-link through r4 that Valgrind
   guarantees not to redirect. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr r9, r9, r9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm_linux */
714
/* ------------------------ arm64-linux ------------------------- */

#if defined(PLAT_arm64_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of x12 totalling 3+13+51+61 = 128 = 2*64 bits -- a
   no-op on a real CPU, but the exact sequence Valgrind's JIT
   recognises as the start of a client request.  Do not alter. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "ror x12, x12, #3  ;  ror x12, x12, #13 \n\t"         \
            "ror x12, x12, #51 ;  ror x12, x12, #61 \n\t"

/* Default goes in x3, args pointer in x4; the result (or the default,
   when not under Valgrind) comes back in x3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned long int  _zzq_args[6];                     \
    volatile unsigned long int  _zzq_result;                      \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile("mov x3, %1\n\t" /*default*/                 \
                     "mov x4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = client_request ( X4 ) */             \
                     "orr x10, x10, x10\n\t"                      \
                     "mov %0, x3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" ((unsigned long int)(_zzq_default)),   \
                       "r" (&_zzq_args[0])                        \
                     : "cc","memory", "x3", "x4");                \
    _zzq_result;                                                  \
  })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = guest_NRADDR */                      \
                     "orr x11, x11, x11\n\t"                      \
                     "mov %0, x3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "x3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment only: a branch-and-link through x8 that Valgrind
   guarantees not to redirect. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr x9, x9, x9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm64_linux */
783
/* ------------------------ s390x-linux ------------------------ */

#if defined(PLAT_s390x_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code.  This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 *
 * The preamble is four no-op register-to-self moves; the fifth "lr"
 * (one of the __*_CODE macros below) selects the operation.
 */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "lr 15,15\n\t"                               \
                     "lr 1,1\n\t"                                 \
                     "lr 2,2\n\t"                                 \
                     "lr 3,3\n\t"

#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

/* Args pointer goes in r2, default in r3; the result (or the default,
   when not under Valgrind) comes back in r3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
 ({volatile unsigned long int _zzq_args[6];                       \
   volatile unsigned long int _zzq_result;                        \
   _zzq_args[0] = (unsigned long int)(_zzq_request);              \
   _zzq_args[1] = (unsigned long int)(_zzq_arg1);                 \
   _zzq_args[2] = (unsigned long int)(_zzq_arg2);                 \
   _zzq_args[3] = (unsigned long int)(_zzq_arg3);                 \
   _zzq_args[4] = (unsigned long int)(_zzq_arg4);                 \
   _zzq_args[5] = (unsigned long int)(_zzq_arg5);                 \
   __asm__ volatile(/* r2 = args */                               \
                    "lgr 2,%1\n\t"                                \
                    /* r3 = default */                            \
                    "lgr 3,%2\n\t"                                \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    __CLIENT_REQUEST_CODE                         \
                    /* results = r3 */                            \
                    "lgr %0, 3\n\t"                               \
                    : "=d" (_zzq_result)                          \
                    : "a" (&_zzq_args[0]), "0" (_zzq_default)     \
                    : "cc", "2", "3", "memory"                    \
                   );                                             \
   _zzq_result;                                                   \
 })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                    \
   volatile unsigned long int __addr;                             \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                    __GET_NR_CONTEXT_CODE                         \
                    "lgr %0, 3\n\t"                               \
                    : "=a" (__addr)                               \
                    :                                             \
                    : "cc", "3", "memory"                         \
                   );                                             \
   _zzq_orig->nraddr = __addr;                                    \
 }

/* Asm fragment only: a call through r1 that Valgrind guarantees not
   to redirect. */
#define VALGRIND_CALL_NOREDIR_R1                                  \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     __VEX_INJECT_IR_CODE);                       \
 } while (0)

#endif /* PLAT_s390x_linux */
860
/* ------------------------- mips32-linux ---------------- */

#if defined(PLAT_mips32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Shifts of $zero are architectural no-ops (writes to $0 are
   discarded); the JIT matches this exact 13/29/3/19 sequence.
 * .word 0x342
 * .word 0x742
 * .word 0xC2
 * .word 0x4C2 */
#define __SPECIAL_INSTRUCTION_PREAMBLE          \
                     "srl $0, $0, 13\n\t"       \
                     "srl $0, $0, 29\n\t"       \
                     "srl $0, $0, 3\n\t"        \
                     "srl $0, $0, 19\n\t"

/* Default goes in $11 (t3), args pointer in $12 (t4); the result (or
   the default, when not under Valgrind) comes back in $11. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
        __asm__ volatile("move $11, %1\n\t" /*default*/           \
                     "move $12, %2\n\t" /*ptr*/                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* T3 = client_request ( T4 ) */             \
                     "or $13, $13, $13\n\t"                       \
                     "move %0, $11\n\t"     /*result*/            \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "$11", "$12", "memory");                   \
    _zzq_result;                                                  \
  })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %t9 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11"                                      \
                     );                                           \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment only: a call through $t9 that Valgrind guarantees not
   to redirect. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%t9 */                      \
                     "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or $11, $11, $11\n\t"                       \
                    );                                            \
 } while (0)


#endif /* PLAT_mips32_linux */
933
934/* ------------------------- mips64-linux ---------------- */
935
936#if defined(PLAT_mips64_linux)
937
938typedef
939 struct {
940 unsigned long nraddr; /* where's the code? */
941 }
942 OrigFn;
943
944/* dsll $0,$0, 3
945 * dsll $0,$0, 13
946 * dsll $0,$0, 29
947 * dsll $0,$0, 19*/
948#define __SPECIAL_INSTRUCTION_PREAMBLE \
949 "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
950 "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
951
952#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
953 _zzq_default, _zzq_request, \
954 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
955 __extension__ \
956 ({ volatile unsigned long int _zzq_args[6]; \
957 volatile unsigned long int _zzq_result; \
958 _zzq_args[0] = (unsigned long int)(_zzq_request); \
959 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
960 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
961 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
962 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
963 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
964 __asm__ volatile("move $11, %1\n\t" /*default*/ \
965 "move $12, %2\n\t" /*ptr*/ \
966 __SPECIAL_INSTRUCTION_PREAMBLE \
967 /* $11 = client_request ( $12 ) */ \
968 "or $13, $13, $13\n\t" \
969 "move %0, $11\n\t" /*result*/ \
970 : "=r" (_zzq_result) \
971 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
972 : "$11", "$12", "memory"); \
973 _zzq_result; \
974 })
975
976#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
977 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
978 volatile unsigned long int __addr; \
979 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
980 /* $11 = guest_NRADDR */ \
981 "or $14, $14, $14\n\t" \
982 "move %0, $11" /*result*/ \
983 : "=r" (__addr) \
984 : \
985 : "$11"); \
986 _zzq_orig->nraddr = __addr; \
987 }
988
/* Asm fragment (not a statement): call the function whose address is in
   $t9 ($25), bypassing Valgrind's function redirection. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir $25 */                       \
                     "or $15, $15, $15\n\t"
993
/* Marker asking Valgrind's VEX JIT to inject IR; a no-op natively
   (writes $11 with itself). */
#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "or $11, $11, $11\n\t"                      \
                    );                                           \
 } while (0)
1000
1001#endif /* PLAT_mips64_linux */
1002
1003#if defined(PLAT_nanomips_linux)
1004
/* Holds the no-redirect address of an original function (nanoMIPS is
   32-bit, hence unsigned int). */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;
/* Magic preamble: four srl-by-constant of $zero, architecturally no-ops,
   recognised by Valgrind as a client-request marker.  Encodings:
  8000 c04d  srl  zero, zero, 13
  8000 c05d  srl  zero, zero, 29
  8000 c043  srl  zero, zero, 3
  8000 c053  srl  zero, zero, 19
*/

#define __SPECIAL_INSTRUCTION_PREAMBLE "srl[32] $zero, $zero, 13 \n\t" \
                                       "srl[32] $zero, $zero, 29 \n\t" \
                                       "srl[32] $zero, $zero, 3 \n\t" \
                                       "srl[32] $zero, $zero, 19 \n\t"
1021
/* Issue a client request and yield its result.  $t0 points at the
   six-word argument block, $a7 carries the default result in (returned
   unchanged when not on Valgrind) and the result out; the request
   marker is "or[32] $t0,$t0,$t0" after the preamble. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
   __extension__                                                  \
   ({ volatile unsigned int _zzq_args[6];                         \
      volatile unsigned int _zzq_result;                          \
      _zzq_args[0] = (unsigned int)(_zzq_request);                \
      _zzq_args[1] = (unsigned int)(_zzq_arg1);                   \
      _zzq_args[2] = (unsigned int)(_zzq_arg2);                   \
      _zzq_args[3] = (unsigned int)(_zzq_arg3);                   \
      _zzq_args[4] = (unsigned int)(_zzq_arg4);                   \
      _zzq_args[5] = (unsigned int)(_zzq_arg5);                   \
      __asm__ volatile("move $a7, %1\n\t" /* default */           \
                       "move $t0, %2\n\t" /* ptr */               \
                       __SPECIAL_INSTRUCTION_PREAMBLE             \
                       /* $a7 = client_request( $t0 ) */          \
                       "or[32] $t0, $t0, $t0\n\t"                 \
                       "move %0, $a7\n\t" /* result */            \
                       : "=r" (_zzq_result)                       \
                       : "r" (_zzq_default), "r" (&_zzq_args[0])  \
                       : "$a7", "$t0", "memory");                 \
      _zzq_result;                                                \
   })
1045
/* Fill _zzq_rlval (an OrigFn) with the no-redirect address of the
   function being wrapped; Valgrind delivers it in $a7. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
   { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                  \
     volatile unsigned long int __addr;                           \
     __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                      /* $a7 = guest_NRADDR */                    \
                      "or[32] $t1, $t1, $t1\n\t"                  \
                      "move %0, $a7" /*result*/                   \
                      : "=r" (__addr)                             \
                      :                                           \
                      : "$a7");                                   \
     _zzq_orig->nraddr = __addr;                                  \
   }
1058
/* Asm fragment (not a statement): call the function whose address is in
   $t9 ($25), bypassing Valgrind's function redirection. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir $25 */                       \
                     "or[32] $t2, $t2, $t2\n\t"
1063
/* Marker asking Valgrind's VEX JIT to inject IR; a no-op natively
   (writes $t3 with itself). */
#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "or[32] $t3, $t3, $t3\n\t"                  \
                    );                                           \
 } while (0)
1070
1071#endif
1072/* Insert assembly code for other platforms here... */
1073
1074#endif /* CONFIG_VALGRIND */
1075
1076
1077/* ------------------------------------------------------------------ */
1078/* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1079/* ugly. It's the least-worst tradeoff I can think of. */
1080/* ------------------------------------------------------------------ */
1081
1082/* This section defines magic (a.k.a appalling-hack) macros for doing
1083 guaranteed-no-redirection macros, so as to get from function
1084 wrappers to the functions they are wrapping. The whole point is to
1085 construct standard call sequences, but to do the call itself with a
1086 special no-redirect call pseudo-instruction that the JIT
1087 understands and handles specially. This section is long and
1088 repetitious, and I can't see a way to make it shorter.
1089
1090 The naming scheme is as follows:
1091
1092 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1093
1094 'W' stands for "word" and 'v' for "void". Hence there are
1095 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1096 and for each, the possibility of returning a word-typed result, or
1097 no result.
1098*/
1099
1100/* Use these to write the name of your wrapper. NOTE: duplicates
1101 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
   the default behaviour equivalence class tag "0000" into the name.
1103 See pub_tool_redir.h for details -- normally you don't need to
1104 think about this, though. */
1105
/* Use an extra level of macroisation so as to ensure the soname/fnname
   args are fully macro-expanded before pasting them together. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

/* Build the mangled symbol name for a function WRAPPER.  The "_vgw"
   prefix and "00000" equivalence-class tag are decoded by Valgrind's
   redirector; ZU/ZZ select how soname/fnname are encoded -- see
   pub_tool_redir.h for details. */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1115
/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros.  The type of the argument _lval is OrigFn.  (In a
   replacement function, as opposed to a wrapper, the captured nraddr
   is always zero.) */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)
1121
1122/* Also provide end-user facilities for function replacement, rather
1123 than wrapping. A replacement function differs from a wrapper in
1124 that it has no way to get hold of the original function being
1125 called, and hence no way to call onwards to it. In a replacement
1126 function, VALGRIND_GET_ORIG_FN always returns zero. */
1127
/* As I_WRAP_SONAME_FNNAME_Z*, but the "_vgr" prefix marks a REPLACEMENT
   rather than a wrapper (no access to the original function). */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1133
1134/* Derivatives of the main macros below, for calling functions
1135 returning void. */
1136
/* Void-returning variants of the CALL_FN_* macros.  Each simply forwards
   to the word-returning macro of the same arity and throws the result
   away into a dummy local. */

#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_v(_discard,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_W(_discard,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_WW(_discard,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_WWW(_discard,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_WWWW(_discard,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_5W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_6W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_7W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1168
1169/* ----------------- x86-{linux,darwin,solaris} ---------------- */
1170
1171#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
1172 || defined(PLAT_x86_solaris)
1173
/* These regs are trashed by the hidden call.  No need to mention eax
   as gcc can already see that, plus causes gcc to bomb. */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions.  The old %esp is parked in %edi, which is
   why every CALL_FN_* below lists "edi" as clobbered. */

#define VALGRIND_ALIGN_STACK               \
      "movl %%esp,%%edi\n\t"               \
      "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK             \
      "movl %%edi,%%esp\n\t"
1188
1189/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1190 long) == 4. */
1191
/* Call the function described by 'orig' with the given word-sized
   arguments (cdecl: pushed right-to-left) and assign the word result to
   'lval'.  %eax carries the argvec pointer in and the result out.  Each
   macro first 16-byte-aligns %esp (old value saved in %edi), then pads
   with "subl" so %esp remains 16-byte aligned after the pushes. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1253
/* 3-, 4- and 5-argument forms; same pattern as above, with the "subl"
   pad chosen so that (pad + 4*nargs) is a multiple of 16. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1333
/* 6-, 7- and 8-argument forms; identical pattern, argument offsets grow
   by 4 per argument. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1433
/* 9-, 10- and 11-argument forms; same pattern as the smaller arities. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1554
/* 12-argument form; 12 pushes are already a multiple of 16 bytes, so no
   "subl" pad is needed. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11,arg12)                    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1598
1599#endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris */
1600
1601/* ---------------- amd64-{linux,darwin,solaris} --------------- */
1602
1603#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1604 || defined(PLAT_amd64_solaris)
1605
/* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */

/* These regs are trashed by the hidden call (the SysV AMD64
   caller-saved set, minus rax which gcc already sees as the output). */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"
1611
1612/* This is all pretty complex. It's so as to make stack unwinding
1613 work reliably. See bug 243270. The basic problem is the sub and
1614 add of 128 of %rsp in all of the following macros. If gcc believes
1615 the CFA is in %rsp, then unwinding may fail, because what's at the
1616 CFA is not what gcc "expected" when it constructs the CFIs for the
1617 places where the macros are instantiated.
1618
1619 But we can't just add a CFI annotation to increase the CFA offset
1620 by 128, to match the sub of 128 from %rsp, because we don't know
1621 whether gcc has chosen %rsp as the CFA at that point, or whether it
1622 has chosen some other register (eg, %rbp). In the latter case,
1623 adding a CFI annotation to change the CFA offset is simply wrong.
1624
1625 So the solution is to get hold of the CFA using
1626 __builtin_dwarf_cfa(), put it in a known register, and add a
1627 CFI annotation to say what the register is. We choose %rbp for
1628 this (perhaps perversely), because:
1629
1630 (1) %rbp is already subject to unwinding. If a new register was
1631 chosen then the unwinder would have to unwind it in all stack
1632 traces, which is expensive, and
1633
1634 (2) %rbp is already subject to precise exception updates in the
1635 JIT. If a new register was chosen, we'd have to have precise
1636 exceptions for it too, which reduces performance of the
1637 generated code.
1638
1639 However .. one extra complication. We can't just whack the result
1640 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1641 list of trashed registers at the end of the inline assembly
1642 fragments; gcc won't allow %rbp to appear in that list. Hence
1643 instead we need to stash %rbp in %r15 for the duration of the asm,
1644 and say that %r15 is trashed instead. gcc seems happy to go with
1645 that.
1646
1647 Oh .. and this all needs to be conditionalised so that it is
1648 unchanged from before this commit, when compiled with older gccs
1649 that don't support __builtin_dwarf_cfa. Furthermore, since
1650 this header file is freestanding, it has to be independent of
1651 config.h, and so the following conditionalisation cannot depend on
1652 configure time checks.
1653
1654 Although it's not clear from
1655 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1656 this expression excludes Darwin.
1657 .cfi directives in Darwin assembly appear to be completely
1658 different and I haven't investigated how they work.
1659
1660 For even more entertainment value, note we have to use the
1661 completely undocumented __builtin_dwarf_cfa(), which appears to
1662 really compute the CFA, whereas __builtin_frame_address(0) claims
1663 to but actually doesn't. See
1664 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1665*/
/* See the long explanation above: put the CFA (obtained via the
   undocumented __builtin_dwarf_cfa()) into %rbp for the duration of the
   asm so stack unwinding keeps working, stashing the real %rbp in %r15
   (gcc refuses %rbp itself in the clobber list).  Only enabled when the
   compiler emits .cfi directives itself; otherwise all three expand to
   nothing. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
1682
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions.  The old %rsp is parked in %r14, which is
   why every CALL_FN_* below lists "r14" as clobbered. */

#define VALGRIND_ALIGN_STACK               \
      "movq %%rsp,%%r14\n\t"               \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK             \
      "movq %%r14,%%rsp\n\t"
1693
1694/* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1695 long) == 8. */
1696
1697/* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1698 macros. In order not to trash the stack redzone, we need to drop
1699 %rsp by 128 before the hidden call, and restore afterwards. The
   nastiness is that it is only by luck that the stack still appears
1701 to be unwindable during the hidden call - since then the behaviour
1702 of any routine using this macro does not match what the CFI data
1703 says. Sigh.
1704
1705 Why is this important? Imagine that a wrapper has a stack
1706 allocated local, and passes to the hidden call, a pointer to it.
1707 Because gcc does not know about the hidden call, it may allocate
1708 that local in the redzone. Unfortunately the hidden call may then
1709 trash it before it comes to use it. So we must step clear of the
1710 redzone, for the duration of the hidden call, to make it safe.
1711
1712 Probably the same problem afflicts the other redzone-style ABIs too
1713 (ppc64-linux); but for those, the stack is
1714 self describing (none of this CFI nonsense) so at least messing
1715 with the stack pointer doesn't give a danger of non-unwindable
1716 stack. */
1717
/* Call the function described by 'orig' and assign the word result to
   'lval'.  Register args go in rdi/rsi/rdx/rcx/r8/r9 per the SysV AMD64
   ABI; %rax carries the argvec pointer in and the result out.  %rsp is
   first dropped by 128 to step over the red zone (see comment above). */
#define CALL_FN_W_v(lval, orig)                                        \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[1];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1)                                  \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[2];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                            \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[3];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
1786
/* 3-, 4- and 5-argument forms; args 3..5 go in rdx, rcx and r8. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                      \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[4];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)                \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[5];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      _argvec[4] = (unsigned long)(arg4);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 32(%%rax), %%rcx\n\t"                                   \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)             \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[6];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      _argvec[4] = (unsigned long)(arg4);                              \
      _argvec[5] = (unsigned long)(arg5);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 40(%%rax), %%r8\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                                   \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
1873
/* As CALL_FN_W_5W, but with six args: arg6 goes in %r9 (the last
   register arg in the SysV amd64 convention). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"  /* skip the 128-byte red zone */  \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t" /* target->%rax */             \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1906
/* Seven args: arg7 is passed on the stack.  Note the $136 (not $128)
   adjustment: 136 plus the single 8-byte push keeps %rsp 16-aligned
   at the call. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"  /* 136+8 pushed = 16-aligned */   \
         "pushq 56(%%rax)\n\t"  /* arg7 -> stack */               \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t" /* target->%rax */             \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1942
/* Eight args: arg7 and arg8 go on the stack (pushed in reverse order
   so arg7 ends up lower).  128 + 2*8 pushed keeps %rsp 16-aligned. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"  /* skip the 128-byte red zone */  \
         "pushq 64(%%rax)\n\t"  /* arg8 */                        \
         "pushq 56(%%rax)\n\t"  /* arg7 */                        \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t" /* target->%rax */             \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1980
/* Nine args: arg7..arg9 on the stack.  136 + 3*8 pushed keeps %rsp
   16-aligned at the call. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"  /* 136+24 pushed = 16-aligned */  \
         "pushq 72(%%rax)\n\t"  /* arg9 */                        \
         "pushq 64(%%rax)\n\t"  /* arg8 */                        \
         "pushq 56(%%rax)\n\t"  /* arg7 */                        \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t" /* target->%rax */             \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2020
/* Ten args: arg7..arg10 on the stack.  128 + 4*8 pushed keeps %rsp
   16-aligned at the call. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"  /* skip the 128-byte red zone */  \
         "pushq 80(%%rax)\n\t"  /* arg10 */                       \
         "pushq 72(%%rax)\n\t"  /* arg9 */                        \
         "pushq 64(%%rax)\n\t"  /* arg8 */                        \
         "pushq 56(%%rax)\n\t"  /* arg7 */                        \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t" /* target->%rax */             \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2062
/* Eleven args: arg7..arg11 on the stack.  136 + 5*8 pushed keeps
   %rsp 16-aligned at the call. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"  /* 136+40 pushed = 16-aligned */  \
         "pushq 88(%%rax)\n\t"  /* arg11 */                       \
         "pushq 80(%%rax)\n\t"  /* arg10 */                       \
         "pushq 72(%%rax)\n\t"  /* arg9 */                        \
         "pushq 64(%%rax)\n\t"  /* arg8 */                        \
         "pushq 56(%%rax)\n\t"  /* arg7 */                        \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t" /* target->%rax */             \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2106
/* Twelve args: arg7..arg12 on the stack.  128 + 6*8 pushed keeps
   %rsp 16-aligned at the call. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"  /* skip the 128-byte red zone */  \
         "pushq 96(%%rax)\n\t"  /* arg12 */                       \
         "pushq 88(%%rax)\n\t"  /* arg11 */                       \
         "pushq 80(%%rax)\n\t"  /* arg10 */                       \
         "pushq 72(%%rax)\n\t"  /* arg9 */                        \
         "pushq 64(%%rax)\n\t"  /* arg8 */                        \
         "pushq 56(%%rax)\n\t"  /* arg7 */                        \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t" /* target->%rax */             \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2152
2153#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
2154
2155/* ------------------------ ppc32-linux ------------------------ */
2156
2157#if defined(PLAT_ppc32_linux)
2158
2159/* This is useful for finding out about the on-stack stuff:
2160
2161 extern int f9 ( int,int,int,int,int,int,int,int,int );
2162 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2163 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2164 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2165
2166 int g9 ( void ) {
2167 return f9(11,22,33,44,55,66,77,88,99);
2168 }
2169 int g10 ( void ) {
2170 return f10(11,22,33,44,55,66,77,88,99,110);
2171 }
2172 int g11 ( void ) {
2173 return f11(11,22,33,44,55,66,77,88,99,110,121);
2174 }
2175 int g12 ( void ) {
2176 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2177 }
2178*/
2179
2180/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2181
/* These regs are trashed by the hidden call.  Listed as asm clobbers
   so gcc does not keep live values in them across CALL_FN_*. */
#define __CALLER_SAVED_REGS \
   "lr", "ctr", "xer", \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
   "r11", "r12", "r13"
2188
2189/* Macros to save and align the stack before making a function
2190 call and restore it afterwards as gcc may not keep the stack
2191 pointer aligned if it doesn't realise calls are being made
2192 to other functions. */
2193
/* Save r1 (the stack pointer) in r28, then clear its low 4 bits
   (rlwinm mask 0..27) to force 16-byte alignment for the call. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" \
   "rlwinm 1,1,0,0,27\n\t"
/* Restore r1 from r28 saved by VALGRIND_ALIGN_STACK. */
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
2199
2200/* These CALL_FN_ macros assume that on ppc32-linux,
2201 sizeof(unsigned long) == 4. */
2202
/* ppc32: call the original zero-argument function.  The argvec address
   goes via r11; the target address is loaded from argvec[0] and the
   result comes back in r3. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"           /* result r3 -> _res */              \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2222
/* ppc32: one argument, passed in r3. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2244
/* ppc32: two arguments, passed in r3/r4. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"   /* arg2->r4 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2268
/* ppc32: three arguments, passed in r3..r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"  /* arg3->r5 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2294
/* ppc32: four arguments, passed in r3..r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2322
/* ppc32: five arguments, passed in r3..r7. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"  /* arg5->r7 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2352
/* ppc32: six arguments, passed in r3..r8. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"  /* arg6->r8 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2384
/* ppc32: seven arguments, passed in r3..r9. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"  /* arg7->r9 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2419
/* ppc32: eight arguments, filling all arg registers r3..r10. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2456
/* ppc32: nine arguments; arg9 overflows the registers and is stored
   on the stack at 8(r1) after dropping the stack by 16 bytes. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"  /* room for stack-passed args */     \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2499
/* ppc32: ten arguments; arg9/arg10 go on the stack at 8(r1)/12(r1). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"  /* room for stack-passed args */     \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2546
/* ppc32: eleven arguments; arg9..arg11 go on the stack (stack dropped
   by 32 bytes to stay 16-aligned). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"  /* room for stack-passed args */     \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2597
/* ppc32: twelve arguments; arg9..arg12 go on the stack (stack dropped
   by 32 bytes to stay 16-aligned). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      _argvec[12] = (unsigned long)arg12;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"  /* room for stack-passed args */     \
         /* arg12 */                                              \
         "lwz 3,48(11)\n\t"                                       \
         "stw 3,20(1)\n\t"                                        \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2652
2653#endif /* PLAT_ppc32_linux */
2654
2655/* ------------------------ ppc64-linux ------------------------ */
2656
2657#if defined(PLAT_ppc64be_linux)
2658
2659/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2660
/* These regs are trashed by the hidden call.  Listed as asm clobbers
   so gcc does not keep live values in them across CALL_FN_*.  (Note:
   unlike the ppc32 list, r2 is absent here; it is handled explicitly
   by the TOC save/restore in each CALL_FN_ macro.) */
#define __CALLER_SAVED_REGS \
   "lr", "ctr", "xer", \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
   "r11", "r12", "r13"
2667
2668/* Macros to save and align the stack before making a function
2669 call and restore it afterwards as gcc may not keep the stack
2670 pointer aligned if it doesn't realise calls are being made
2671 to other functions. */
2672
/* Save r1 (the stack pointer) in r28, then clear its low 4 bits
   (rldicr keeps bits 0..59) to force 16-byte alignment. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" \
   "rldicr 1,1,0,59\n\t"
/* Restore r1 from r28 saved by VALGRIND_ALIGN_STACK. */
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
2678
2679/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2680 long) == 8. */
2681
/* ppc64be: call the original zero-argument function.  The caller's
   TOC pointer (r2) is saved across the call in _argvec[0], and the
   target's TOC pointer (from OrigFn.r2, stashed in _argvec[1]) is
   installed before the branch; %1 points at _argvec[2]. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t"   /* use nraddr's tocptr */            \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"       /* result r3 -> _res */              \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2707
/* ppc64be: one argument, passed in r3; TOC handled as in CALL_FN_W_v. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2735
/* ppc64be: two arguments, passed in r3/r4. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2765
/* ppc64be: three arguments, passed in r3..r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2797
/* ppc64be: four arguments, passed in r3..r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */                       \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2831
/* ppc64be: hidden call with 5 word-sized args (r3..r7); result in
   lval. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+5]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 7, 40(11)\n\t" /* arg5->r7 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2867
/* ppc64be: hidden call with 6 word-sized args (r3..r8); result in
   lval. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+6]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 7, 40(11)\n\t" /* arg5->r7 */ \
         "ld 8, 48(11)\n\t" /* arg6->r8 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2905
/* ppc64be: hidden call with 7 word-sized args (r3..r9); result in
   lval. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+7]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 7, 40(11)\n\t" /* arg5->r7 */ \
         "ld 8, 48(11)\n\t" /* arg6->r8 */ \
         "ld 9, 56(11)\n\t" /* arg7->r9 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2946
/* ppc64be: hidden call with 8 word-sized args — the full register
   complement r3..r10; result in lval. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+8]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 7, 40(11)\n\t" /* arg5->r7 */ \
         "ld 8, 48(11)\n\t" /* arg6->r8 */ \
         "ld 9, 56(11)\n\t" /* arg7->r9 */ \
         "ld 10, 64(11)\n\t" /* arg8->r10 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2989
/* ppc64be: hidden call with 9 word-sized args.  args 1-8 go in
   r3..r10; arg9 goes on the stack, so the frame is grown by 128
   bytes first to make room. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+9]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-128\n\t" /* expand stack frame */ \
         /* arg9 */ \
         "ld 3,72(11)\n\t" \
         "std 3,112(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 7, 40(11)\n\t" /* arg5->r7 */ \
         "ld 8, 48(11)\n\t" /* arg6->r8 */ \
         "ld 9, 56(11)\n\t" /* arg7->r9 */ \
         "ld 10, 64(11)\n\t" /* arg8->r10 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3038
/* ppc64be: hidden call with 10 word-sized args.  args 1-8 in
   r3..r10; args 9-10 spilled to the expanded (128-byte) stack
   frame. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+10]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-128\n\t" /* expand stack frame */ \
         /* arg10 */ \
         "ld 3,80(11)\n\t" \
         "std 3,120(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(11)\n\t" \
         "std 3,112(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 7, 40(11)\n\t" /* arg5->r7 */ \
         "ld 8, 48(11)\n\t" /* arg6->r8 */ \
         "ld 9, 56(11)\n\t" /* arg7->r9 */ \
         "ld 10, 64(11)\n\t" /* arg8->r10 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3091
/* ppc64be: hidden call with 11 word-sized args.  args 1-8 in
   r3..r10; args 9-11 spilled to the expanded (144-byte) stack
   frame. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+11]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      _argvec[2+11] = (unsigned long)arg11; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-144\n\t" /* expand stack frame */ \
         /* arg11 */ \
         "ld 3,88(11)\n\t" \
         "std 3,128(1)\n\t" \
         /* arg10 */ \
         "ld 3,80(11)\n\t" \
         "std 3,120(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(11)\n\t" \
         "std 3,112(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 7, 40(11)\n\t" /* arg5->r7 */ \
         "ld 8, 48(11)\n\t" /* arg6->r8 */ \
         "ld 9, 56(11)\n\t" /* arg7->r9 */ \
         "ld 10, 64(11)\n\t" /* arg8->r10 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3148
/* ppc64be: hidden call with 12 word-sized args.  args 1-8 in
   r3..r10; args 9-12 spilled to the expanded (144-byte) stack
   frame. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+12]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      _argvec[2+11] = (unsigned long)arg11; \
      _argvec[2+12] = (unsigned long)arg12; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-144\n\t" /* expand stack frame */ \
         /* arg12 */ \
         "ld 3,96(11)\n\t" \
         "std 3,136(1)\n\t" \
         /* arg11 */ \
         "ld 3,88(11)\n\t" \
         "std 3,128(1)\n\t" \
         /* arg10 */ \
         "ld 3,80(11)\n\t" \
         "std 3,120(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(11)\n\t" \
         "std 3,112(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 7, 40(11)\n\t" /* arg5->r7 */ \
         "ld 8, 48(11)\n\t" /* arg6->r8 */ \
         "ld 9, 56(11)\n\t" /* arg7->r9 */ \
         "ld 10, 64(11)\n\t" /* arg8->r10 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3209
3210#endif /* PLAT_ppc64be_linux */
3211
3212/* ------------------------- ppc64le-linux ----------------------- */
3213#if defined(PLAT_ppc64le_linux)
3214
/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on the stack) */
3216
3217/* These regs are trashed by the hidden call. */
3218#define __CALLER_SAVED_REGS \
3219 "lr", "ctr", "xer", \
3220 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
3221 "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
3222 "r11", "r12", "r13"
3223
/* Macros to save and align the stack before making a function
   call and to restore it afterwards, since gcc may not keep the
   stack pointer aligned if it doesn't realise calls are being
   made to other functions. */
3228
/* Save r1 in r28, then round r1 down to a 16-byte boundary
   (rldicr ...,0,59 clears the low 4 bits); restore by copying r28
   back. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" \
   "rldicr 1,1,0,59\n\t"
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
3234
/* These CALL_FN_ macros assume that on ppc64-linux,
   sizeof(unsigned long) == 8. */
3237
/* ppc64le (ELFv2): call the original (non-redirected) function with
   no args; word-sized result into lval.  The target address goes in
   r12 as the ABI requires for indirect calls. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+0]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3263
/* ppc64le: hidden call with 1 word-sized arg (r3); result in
   lval. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+1]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3291
/* ppc64le: hidden call with 2 word-sized args (r3, r4); result in
   lval. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+2]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3321
/* ppc64le: hidden call with 3 word-sized args (r3..r5); result in
   lval. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+3]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3353
/* ppc64le: hidden call with 4 word-sized args (r3..r6); result in
   lval. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+4]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3387
/* ppc64le: hidden call with 5 word-sized args (r3..r7); result in
   lval. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+5]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3423
/* ppc64le: hidden call with 6 word-sized args (r3..r8); result in
   lval. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+6]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3461
/* ppc64le: hidden call with 7 word-sized args (r3..r9); result in
   lval. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+7]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 9, 56(12)\n\t" /* arg7->r9 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3502
/* ppc64le: hidden call with 8 word-sized args — the full register
   complement r3..r10; result in lval. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+8]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 9, 56(12)\n\t" /* arg7->r9 */ \
         "ld 10, 64(12)\n\t" /* arg8->r10 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3545
/* ppc64le: hidden call with 9 word-sized args.  args 1-8 in
   r3..r10; arg9 spilled to the expanded (128-byte) stack frame —
   note the ELFv2 parameter-save offsets (96) differ from BE
   ELFv1. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+9]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-128\n\t" /* expand stack frame */ \
         /* arg9 */ \
         "ld 3,72(12)\n\t" \
         "std 3,96(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 9, 56(12)\n\t" /* arg7->r9 */ \
         "ld 10, 64(12)\n\t" /* arg8->r10 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3594
/* ppc64le: hidden call with 10 word-sized args.  args 1-8 in
   r3..r10; args 9-10 spilled to the expanded (128-byte) stack
   frame at ELFv2 offsets 96/104. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+10]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-128\n\t" /* expand stack frame */ \
         /* arg10 */ \
         "ld 3,80(12)\n\t" \
         "std 3,104(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(12)\n\t" \
         "std 3,96(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 9, 56(12)\n\t" /* arg7->r9 */ \
         "ld 10, 64(12)\n\t" /* arg8->r10 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3647
/* ppc64le: hidden call with 11 word-sized args.  args 1-8 in
   r3..r10; args 9-11 spilled to the expanded (144-byte) stack
   frame at ELFv2 offsets 96/104/112. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+11]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      _argvec[2+11] = (unsigned long)arg11; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-144\n\t" /* expand stack frame */ \
         /* arg11 */ \
         "ld 3,88(12)\n\t" \
         "std 3,112(1)\n\t" \
         /* arg10 */ \
         "ld 3,80(12)\n\t" \
         "std 3,104(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(12)\n\t" \
         "std 3,96(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 9, 56(12)\n\t" /* arg7->r9 */ \
         "ld 10, 64(12)\n\t" /* arg8->r10 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3704
/* Call a 12-argument word function through its non-redirected entry
   point and assign the word result to LVAL.  Same scheme as
   CALL_FN_W_11W: args 1-8 in r3-r10, args 9-12 spilled to the
   expanded frame's parameter save area; r2 (TOC) saved in
   _argvec[0] and restored after the call. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      /* [0] receives caller's r2, [1] callee TOC, [2] target, [3..] args */ \
      volatile unsigned long _argvec[3+12]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      _argvec[2+11] = (unsigned long)arg11; \
      _argvec[2+12] = (unsigned long)arg12; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" /* r12 = &_argvec[2] */ \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-144\n\t" /* expand stack frame */ \
         /* arg12 */ \
         "ld 3,96(12)\n\t" \
         "std 3,120(1)\n\t" \
         /* arg11 */ \
         "ld 3,88(12)\n\t" \
         "std 3,112(1)\n\t" \
         /* arg10 */ \
         "ld 3,80(12)\n\t" \
         "std 3,104(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(12)\n\t" \
         "std 3,96(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 9, 56(12)\n\t" /* arg7->r9 */ \
         "ld 10, 64(12)\n\t" /* arg8->r10 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" \
         "mr %0,3\n\t" /* result is in r3 */ \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3765
3766#endif /* PLAT_ppc64le_linux */
3767
3768/* ------------------------- arm-linux ------------------------- */
3769
3770#if defined(PLAT_arm_linux)
3771
/* These regs are trashed by the hidden call.  AAPCS caller-saved set:
   r0-r3 (args/result), r12 (IP) and r14 (LR); r4 is included because
   the CALL_FN_ macros below use it to hold the target address. */
#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* This is a bit tricky.  We store the original stack pointer in r10
   as it is callee-saves.  gcc doesn't allow the use of r11 for some
   reason.  Also, we can't directly "bic" the stack pointer in thumb
   mode since r13 isn't an allowed register number in that context.
   So use r4 as a temporary, since that is about to get trashed
   anyway, just after each use of this macro.  Side effect is we need
   to be very careful about any future changes, since
   VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* Rounds sp down to an 8-byte boundary; original sp is kept in r10
   (hence "r10" in every clobber list below) until RESTORE_STACK. */
#define VALGRIND_ALIGN_STACK \
      "mov r10, sp\n\t" \
      "mov r4,  sp\n\t" \
      "bic r4,  r4, #7\n\t" \
      "mov sp,  r4\n\t"
#define VALGRIND_RESTORE_STACK \
      "mov sp,  r10\n\t"
3795
3796/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3797 long) == 4. */
3798
/* Call a 0-argument function at its non-redirected address and put
   the word result (r0) in LVAL.  The target address is loaded into
   r4 for the branch-and-link-to-noredir sequence. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3817
/* Call a 1-argument function at its non-redirected address; AAPCS:
   arg1 goes in r0, result returns in r0. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3838
/* Call a 2-argument function at its non-redirected address; args go
   in r0-r1 per AAPCS. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3861
/* Call a 3-argument function at its non-redirected address; args go
   in r0-r2 per AAPCS. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3886
/* Call a 4-argument function at its non-redirected address; args
   fill all four AAPCS argument registers r0-r3. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3913
/* Call a 5-argument function: args 1-4 in r0-r3, arg5 pushed on the
   stack.  The extra "sub sp, #4" pads the single-word push so sp
   stays 8-byte aligned at the call. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #4 \n\t" /* pad to keep sp 8-aligned */ \
         "ldr r0, [%1, #20] \n\t" \
         "push {r0} \n\t" /* arg5 */ \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3944
/* Call a 6-argument function: args 1-4 in r0-r3, args 5-6 pushed as
   a pair (8 bytes, so no alignment pad needed). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #20] \n\t" \
         "ldr r1, [%1, #24] \n\t" \
         "push {r0, r1} \n\t" /* args 5-6 */ \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3976
/* Call a 7-argument function: args 1-4 in r0-r3, args 5-7 pushed;
   "sub sp, #4" pads the 12-byte push to keep sp 8-byte aligned. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #4 \n\t" /* pad to keep sp 8-aligned */ \
         "ldr r0, [%1, #20] \n\t" \
         "ldr r1, [%1, #24] \n\t" \
         "ldr r2, [%1, #28] \n\t" \
         "push {r0, r1, r2} \n\t" /* args 5-7 */ \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4012
/* Call an 8-argument function: args 1-4 in r0-r3, args 5-8 pushed
   as one 16-byte (already aligned) block. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #20] \n\t" \
         "ldr r1, [%1, #24] \n\t" \
         "ldr r2, [%1, #28] \n\t" \
         "ldr r3, [%1, #32] \n\t" \
         "push {r0, r1, r2, r3} \n\t" /* args 5-8 */ \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4049
/* Call a 9-argument function: args 1-4 in r0-r3, args 5-9 pushed;
   "sub sp, #4" pads the 20-byte push to keep sp 8-byte aligned. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #4 \n\t" /* pad to keep sp 8-aligned */ \
         "ldr r0, [%1, #20] \n\t" \
         "ldr r1, [%1, #24] \n\t" \
         "ldr r2, [%1, #28] \n\t" \
         "ldr r3, [%1, #32] \n\t" \
         "ldr r4, [%1, #36] \n\t" \
         "push {r0, r1, r2, r3, r4} \n\t" /* args 5-9 */ \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4089
/* Call a 10-argument function: args 1-4 in r0-r3; arg10 is pushed
   first (it sits highest on the stack), then args 5-9, for a
   24-byte (8-aligned) outgoing area. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #40] \n\t" \
         "push {r0} \n\t" /* arg10 */ \
         "ldr r0, [%1, #20] \n\t" \
         "ldr r1, [%1, #24] \n\t" \
         "ldr r2, [%1, #28] \n\t" \
         "ldr r3, [%1, #32] \n\t" \
         "ldr r4, [%1, #36] \n\t" \
         "push {r0, r1, r2, r3, r4} \n\t" /* args 5-9 */ \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4131
/* Call an 11-argument function: args 1-4 in r0-r3; args 10-11 pushed
   first (highest addresses), then args 5-9; the leading "sub sp, #4"
   pads the total outgoing area to an 8-byte multiple. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #4 \n\t" /* pad to keep sp 8-aligned */ \
         "ldr r0, [%1, #40] \n\t" \
         "ldr r1, [%1, #44] \n\t" \
         "push {r0, r1} \n\t" /* args 10-11 */ \
         "ldr r0, [%1, #20] \n\t" \
         "ldr r1, [%1, #24] \n\t" \
         "ldr r2, [%1, #28] \n\t" \
         "ldr r3, [%1, #32] \n\t" \
         "ldr r4, [%1, #36] \n\t" \
         "push {r0, r1, r2, r3, r4} \n\t" /* args 5-9 */ \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4177
/* Call a 12-argument function: args 1-4 in r0-r3; args 10-12 pushed
   first (highest addresses), then args 5-9, giving a 32-byte
   (8-aligned) outgoing area — no pad needed. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #40] \n\t" \
         "ldr r1, [%1, #44] \n\t" \
         "ldr r2, [%1, #48] \n\t" \
         "push {r0, r1, r2} \n\t" /* args 10-12 */ \
         "ldr r0, [%1, #20] \n\t" \
         "ldr r1, [%1, #24] \n\t" \
         "ldr r2, [%1, #28] \n\t" \
         "ldr r3, [%1, #32] \n\t" \
         "ldr r4, [%1, #36] \n\t" \
         "push {r0, r1, r2, r3, r4} \n\t" /* args 5-9 */ \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4224
4225#endif /* PLAT_arm_linux */
4226
4227/* ------------------------ arm64-linux ------------------------ */
4228
4229#if defined(PLAT_arm64_linux)
4230
/* These regs are trashed by the hidden call: the AAPCS64 caller-saved
   integer registers (x0-x18 plus x30/LR), x19/x20 (NOTE(review): listed
   although normally callee-saved — presumably used by the
   branch-to-noredir sequence; confirm against the CFI macros), and all
   SIMD/FP registers v0-v31. */
#define __CALLER_SAVED_REGS \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9",   \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",      \
     "x18", "x19", "x20", "x30",                                  \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",  \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17",      \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25",      \
     "v26", "v27", "v28", "v29", "v30", "v31"

/* x21 is callee-saved, so we can use it to save and restore SP around
   the hidden call.  ALIGN rounds sp down to a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK \
      "mov x21, sp\n\t" \
      "bic sp, x21, #15\n\t"
#define VALGRIND_RESTORE_STACK \
      "mov sp,  x21\n\t"
4248
4249/* These CALL_FN_ macros assume that on arm64-linux,
4250 sizeof(unsigned long) == 8. */
4251
/* Call a 0-argument function at its non-redirected address; the word
   result (x0) goes to LVAL.  The target is loaded into x8 for the
   branch-and-link-to-noredir sequence. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4270
/* Call a 1-argument function at its non-redirected address; AAPCS64:
   arg1 goes in x0, result returns in x0. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4291
/* Call a 2-argument function at its non-redirected address; args go
   in x0-x1 per AAPCS64. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4314
/* Call a 3-argument function at its non-redirected address; args go
   in x0-x2 per AAPCS64. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4339
/* Call a 4-argument function at its non-redirected address; args go
   in x0-x3 per AAPCS64. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4366
/* Call a 5-argument function at its non-redirected address; args go
   in x0-x4 per AAPCS64 (no stack args needed until 9 arguments). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4395
/* Call a 6-argument function at its non-redirected address; args go
   in x0-x5 per AAPCS64. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x5, [%1, #48] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4426
/* Call a 7-argument function at its non-redirected address; args go
   in x0-x6 per AAPCS64. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x5, [%1, #48] \n\t" \
         "ldr x6, [%1, #56] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4460
/* Call an 8-argument function at its non-redirected address; args
   fill all eight AAPCS64 argument registers x0-x7. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x5, [%1, #48] \n\t" \
         "ldr x6, [%1, #56] \n\t" \
         "ldr x7, [%1, #64] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4496
/* Call a 9-argument function: args 1-8 in x0-x7, arg9 stored at
   [sp, #0] in a 32-byte (16-aligned) outgoing area.  x8 doubles as
   scratch for the stack store before receiving the target. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #0x20 \n\t" /* 16-aligned outgoing area */ \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x5, [%1, #48] \n\t" \
         "ldr x6, [%1, #56] \n\t" \
         "ldr x7, [%1, #64] \n\t" \
         "ldr x8, [%1, #72] \n\t" \
         "str x8, [sp, #0]  \n\t" /* arg9 */ \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4536
/* Call a 10-argument function: args 1-8 in x0-x7, args 9-10 stored
   at [sp, #0] and [sp, #8] in a 32-byte (16-aligned) outgoing area. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #0x20 \n\t" /* 16-aligned outgoing area */ \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x5, [%1, #48] \n\t" \
         "ldr x6, [%1, #56] \n\t" \
         "ldr x7, [%1, #64] \n\t" \
         "ldr x8, [%1, #72] \n\t" \
         "str x8, [sp, #0]  \n\t" /* arg9 */ \
         "ldr x8, [%1, #80] \n\t" \
         "str x8, [sp, #8]  \n\t" /* arg10 */ \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4579
/* Call an 11-argument function: args 1-8 in x0-x7, args 9-11 stored
   at [sp, #0/#8/#16] in a 48-byte (16-aligned) outgoing area. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #0x30 \n\t" /* 16-aligned outgoing area */ \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x5, [%1, #48] \n\t" \
         "ldr x6, [%1, #56] \n\t" \
         "ldr x7, [%1, #64] \n\t" \
         "ldr x8, [%1, #72] \n\t" \
         "str x8, [sp, #0]  \n\t" /* arg9 */ \
         "ldr x8, [%1, #80] \n\t" \
         "str x8, [sp, #8]  \n\t" /* arg10 */ \
         "ldr x8, [%1, #88] \n\t" \
         "str x8, [sp, #16] \n\t" /* arg11 */ \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4625
/* arm64: call a non-redirected function taking 12 word-sized args.
   Args 1..8 go in x0..x7; args 9..12 are spilled into a 0x30-byte
   stack area at sp+0/+8/+16/+24.  Target address goes in x8; the
   result is read from x0. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,     \
                                  arg12)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t" /* room for stack args 9..12 */ \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t" /* arg9  -> [sp,#0]  */         \
         "str x8, [sp, #0] \n\t"                                  \
         "ldr x8, [%1, #80] \n\t" /* arg10 -> [sp,#8]  */         \
         "str x8, [sp, #8] \n\t"                                  \
         "ldr x8, [%1, #88] \n\t" /* arg11 -> [sp,#16] */         \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1, #96] \n\t" /* arg12 -> [sp,#24] */         \
         "str x8, [sp, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4675
4676#endif /* PLAT_arm64_linux */
4677
4678/* ------------------------- s390x-linux ------------------------- */
4679
4680#if defined(PLAT_s390x_linux)
4681
4682/* Similar workaround as amd64 (see above), but we use r11 as frame
4683 pointer and save the old r11 in r7. r11 might be used for
4684 argvec, therefore we copy argvec in r1 since r1 is clobbered
4685 after the call anyway. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* Extra asm input operand (%2): the current CFA, used as the
   temporary frame pointer value loaded into r11 below. */
# define __FRAME_POINTER \
      ,"d"(__builtin_dwarf_cfa())
# define VALGRIND_CFI_PROLOGUE \
   ".cfi_remember_state\n\t" \
   "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
   "lgr 7,11\n\t" /* save old r11 in r7 (restored in the epilogue) */ \
   "lgr 11,%2\n\t" \
   ".cfi_def_cfa r11, 0\n\t"
# define VALGRIND_CFI_EPILOGUE \
   "lgr 11, 7\n\t" \
   ".cfi_restore_state\n\t"
#else
/* No CFI asm available: only the argvec-pointer copy into r1 is
   needed; no frame-pointer juggling and no extra input operand. */
# define __FRAME_POINTER
# define VALGRIND_CFI_PROLOGUE \
   "lgr 1,%1\n\t"
# define VALGRIND_CFI_EPILOGUE
#endif
4704
4705/* Nb: On s390 the stack pointer is properly aligned *at all times*
4706 according to the s390 GCC maintainer. (The ABI specification is not
4707 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4708 VALGRIND_RESTORE_STACK are not defined here. */
4709
4710/* These regs are trashed by the hidden call. Note that we overwrite
4711 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4712 function a proper return address. All others are ABI defined call
4713 clobbers. */
/* r0-r5 and r14 (overwritten to provide a return address, see the
   note above) plus either the full vector register file when the
   vector facility is compiled in, or just f0-f7 otherwise. */
#if defined(__VX__) || defined(__S390_VX__)
#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
      "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", \
      "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", \
      "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", \
      "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
#else
#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
      "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
#endif
4724
4725/* Nb: Although r11 is modified in the asm snippets below (inside
4726 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4727 two reasons:
4728 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4729 modified
4730 (2) GCC will complain that r11 cannot appear inside a clobber section,
4731 when compiled with -O -fno-omit-frame-pointer
4732 */
4733
/* s390x: call a non-redirected function taking no args.  r1 holds
   the argvec pointer (copied there by VALGRIND_CFI_PROLOGUE); the
   target address is then loaded from argvec[0] into r1 and called
   via the no-redirect sequence.  A 160-byte register save area is
   reserved on the stack (aghi 15,-160); the result is read from r2.
   r7 is clobbered because the CFI prologue parks the old r11 there.
   NOTE(review): the argvec input uses constraint "d" here whereas
   every other s390x CALL_FN_* below uses "a" — confirm intentional. */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "d" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4754
4755/* The call abi has the arguments in r2-r6 and stack */
/* s390x: 1-arg call.  arg1 is loaded into r2 from the argvec (whose
   address the CFI prologue copied into r1), then the target address
   overwrites r1 and the no-redirect call is made.  160-byte register
   save area; result from r2; r7 trashed by the prologue's r11 save. */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"  /* arg1->r2 */                        \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4778
/* s390x: 2-arg call.  args 1..2 go in r2..r3; otherwise identical
   in structure to CALL_FN_W_W above. */
#define CALL_FN_W_WW(lval, orig, arg1, arg2)                     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4803
/* s390x: 3-arg call.  args 1..3 go in r2..r4; result from r2. */
#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3)              \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4830
/* s390x: 4-arg call.  args 1..4 go in r2..r5; result from r2. */
#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4)       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4859
/* s390x: 5-arg call.  args 1..5 go in r2..r6; because r6 now carries
   an argument it is added to the clobber list ("6").  Result from r2. */
#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5)   \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4890
/* s390x: 6-arg call.  args 1..5 go in r2..r6; arg6 is copied (mvc)
   to the stack slot at 160(r15), so the reservation grows to 168
   bytes (160-byte save area + one 8-byte stack arg). */
#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6)                                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-168\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t" /* arg6 -> stack */          \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,168\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4924
/* s390x: 7-arg call.  args 1..5 in r2..r6; args 6..7 copied to the
   stack at 160(r15)/168(r15); 176-byte reservation. */
#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7)                                 \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-176\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t" /* arg6 -> stack */          \
         "mvc 168(8,15), 56(1)\n\t" /* arg7 -> stack */          \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,176\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4960
/* s390x: 8-arg call.  args 1..5 in r2..r6; args 6..8 copied to the
   stack at 160/168/176(r15); 184-byte reservation. */
#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7 ,arg8)                           \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-184\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t" /* arg6 -> stack */          \
         "mvc 168(8,15), 56(1)\n\t" /* arg7 -> stack */          \
         "mvc 176(8,15), 64(1)\n\t" /* arg8 -> stack */          \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,184\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4998
/* s390x: 9-arg call.  args 1..5 in r2..r6; args 6..9 copied to the
   stack at 160..184(r15); 192-byte reservation. */
#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7 ,arg8, arg9)                     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[10];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-192\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t" /* arg6 -> stack */          \
         "mvc 168(8,15), 56(1)\n\t" /* arg7 -> stack */          \
         "mvc 176(8,15), 64(1)\n\t" /* arg8 -> stack */          \
         "mvc 184(8,15), 72(1)\n\t" /* arg9 -> stack */          \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,192\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5038
/* s390x: 10-arg call.  args 1..5 in r2..r6; args 6..10 copied to the
   stack at 160..192(r15); 200-byte reservation. */
#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10)             \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[11];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-200\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t" /* arg6  -> stack */         \
         "mvc 168(8,15), 56(1)\n\t" /* arg7  -> stack */         \
         "mvc 176(8,15), 64(1)\n\t" /* arg8  -> stack */         \
         "mvc 184(8,15), 72(1)\n\t" /* arg9  -> stack */         \
         "mvc 192(8,15), 80(1)\n\t" /* arg10 -> stack */         \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,200\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5080
/* s390x: 11-arg call.  args 1..5 in r2..r6; args 6..11 copied to the
   stack at 160..200(r15); 208-byte reservation. */
#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10, arg11)      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[12];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      _argvec[11] = (unsigned long)arg11;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-208\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t" /* arg6  -> stack */         \
         "mvc 168(8,15), 56(1)\n\t" /* arg7  -> stack */         \
         "mvc 176(8,15), 64(1)\n\t" /* arg8  -> stack */         \
         "mvc 184(8,15), 72(1)\n\t" /* arg9  -> stack */         \
         "mvc 192(8,15), 80(1)\n\t" /* arg10 -> stack */         \
         "mvc 200(8,15), 88(1)\n\t" /* arg11 -> stack */         \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,208\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5124
/* s390x: 12-arg call.  args 1..5 in r2..r6; args 6..12 copied to the
   stack at 160..208(r15); 216-byte reservation. */
#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10, arg11,      \
                      arg12)                                     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[13];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      _argvec[11] = (unsigned long)arg11;                        \
      _argvec[12] = (unsigned long)arg12;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-216\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t" /* arg6  -> stack */         \
         "mvc 168(8,15), 56(1)\n\t" /* arg7  -> stack */         \
         "mvc 176(8,15), 64(1)\n\t" /* arg8  -> stack */         \
         "mvc 184(8,15), 72(1)\n\t" /* arg9  -> stack */         \
         "mvc 192(8,15), 80(1)\n\t" /* arg10 -> stack */         \
         "mvc 200(8,15), 88(1)\n\t" /* arg11 -> stack */         \
         "mvc 208(8,15), 96(1)\n\t" /* arg12 -> stack */         \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,216\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5170
5171
5172#endif /* PLAT_s390x_linux */
5173
5174/* ------------------------- mips32-linux ----------------------- */
5175
5176#if defined(PLAT_mips32_linux)
5177
5178/* These regs are trashed by the hidden call. */
/* MIPS o32 numbering: $2-$3 = v0-v1, $4-$7 = a0-a3, $8-$15 = t0-t7,
   $24-$25 = t8-t9, $31 = ra. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",       \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
5182
5183/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5184 long) == 4. */
5185
/* mips32: zero-arg call.  $28 (gp) and $31 (ra) are saved on the
   stack, the o32 16-byte outgoing-argument area is reserved, the
   target address is loaded into $25 (t9) and the no-redirect call
   is made; gp/ra are then restored and the result read from $2 (v0). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"  /* save gp */                     \
         "sw $31, 4($29) \n\t"  /* save ra */                     \
         "subu $29, $29, 16 \n\t" /* o32 outgoing-arg area */     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16\n\t"                                  \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"       /* result from v0 */               \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5210
/* mips32: 1-arg call.  arg1 goes in $4 (a0); otherwise identical in
   structure to CALL_FN_W_v above. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"   /* arg1*/                          \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5237
/* mips32: 2-arg call.  args 1..2 go in $4..$5 (a0..a1). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5266
/* mips32: 3-arg call.  args 1..3 go in $4..$6 (a0..a2). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5297
/* mips32: 4-arg call.  args 1..4 go in $4..$7 (a0..a3). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5330
/* mips32: 5-arg call.  args 1..4 go in $4..$7; arg5 is staged via $4
   (loaded BEFORE the 24-byte frame is opened, stored at 16($29)
   after), then $4 is reloaded with arg1. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5, via scratch $4 */         \
         "subu $29, $29, 24\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 24 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* mips32: 6-arg call.  args 1..4 in $4..$7; args 5..6 staged through
   $4 into the 32-byte frame at 16($29)/20($29) before $4..$7 are
   loaded with the register args. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5 */                         \
         "subu $29, $29, 32\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"  /* arg6 */                         \
         "nop\n\t"                                                \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 32 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5405
/* mips32: 7-arg call.  args 1..4 in $4..$7; args 5..7 staged through
   $4 into the 32-byte frame at 16/20/24($29). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5 */                         \
         "subu $29, $29, 32\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"  /* arg6 */                         \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"  /* arg7 */                         \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 32 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5448
/* mips32: 8-arg call.  args 1..4 in $4..$7; args 5..8 staged through
   $4 into the 40-byte frame at 16/20/24/28($29). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5 */                         \
         "subu $29, $29, 40\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"  /* arg6 */                         \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"  /* arg7 */                         \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"  /* arg8 */                         \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 40 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5494
/* mips32: 9-arg call.  args 1..4 in $4..$7; args 5..9 staged through
   $4 into the 40-byte frame at 16..32($29). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5 */                         \
         "subu $29, $29, 40\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"  /* arg6 */                         \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"  /* arg7 */                         \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"  /* arg8 */                         \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"  /* arg9 */                         \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 40 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5543
/* mips32: 10-arg call.  args 1..4 in $4..$7; args 5..10 staged
   through $4 into the 48-byte frame at 16..36($29). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5 */                         \
         "subu $29, $29, 48\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"  /* arg6 */                         \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"  /* arg7 */                         \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"  /* arg8 */                         \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"  /* arg9 */                         \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"  /* arg10 */                        \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 48 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5595
/* 11-argument variant of the mips32 no-redirect call: args 1..4 in
   $4..$7, args 5..11 spilled to a 48-byte stack area at offsets
   16..40($29); gp/ra saved and restored, result taken from $2.  */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 48\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 44(%1) \n\t"                                     \
         "sw $4, 40($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 48 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5651
/* 12-argument variant of the mips32 no-redirect call: args 1..4 in
   $4..$7, args 5..12 spilled to a 56-byte stack area at offsets
   16..44($29); gp/ra saved and restored, result taken from $2.
   NOTE(review): this macro uses the "r" input constraint whereas the
   sibling macros above use "0" (tying the input to the output
   register); both forms are functionally equivalent here since %1 is
   only read before %0 is written.  */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 56\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 44(%1) \n\t"                                     \
         "sw $4, 40($29) \n\t"                                    \
         "lw $4, 48(%1) \n\t"                                     \
         "sw $4, 44($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 56 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5710
5711#endif /* PLAT_mips32_linux */
5712
5713/* ------------------------- nanomips-linux -------------------- */
5714
5715#if defined(PLAT_nanomips_linux)
5716
/* These regs are trashed by the hidden call.  The list covers the
   nanomips argument registers $a0..$a7, the temporaries $t0..$t5,
   $t8, $t9 and $at, so the compiler spills anything live in them
   across the asm blocks below.  */
#define __CALLER_SAVED_REGS "$t4", "$t5", "$a0", "$a1", "$a2",    \
"$a3", "$a4", "$a5", "$a6", "$a7", "$t0", "$t1", "$t2", "$t3",    \
"$t8","$t9", "$at"
5721
5722/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5723 long) == 4. */
5724
/* Call the zero-argument function described by 'orig' without
   Valgrind redirection (nanomips): the target address is loaded from
   _argvec[0] into $t9 and the result is read back from $a0 into
   'lval'.  */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5741
/* 1-argument nanomips no-redirect call: target in $t9, arg1 in $a0,
   result read back from $a0 into 'lval'.  */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5760
/* 2-argument nanomips no-redirect call: target in $t9, args in
   $a0/$a1, result read back from $a0 into 'lval'.  */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5781
/* 3-argument nanomips no-redirect call: target in $t9, args in
   $a0..$a2, result read back from $a0 into 'lval'.  */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5804
/* 4-argument nanomips no-redirect call: target in $t9, args in
   $a0..$a3, result read back from $a0 into 'lval'.  */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5829
/* 5-argument nanomips no-redirect call: target in $t9, args in
   $a0..$a4, result read back from $a0 into 'lval'.  */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         "lw $a4,20(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* 6-argument nanomips no-redirect call: target in $t9, args in
   $a0..$a5, result read back from $a0 into 'lval'.  */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         "lw $a4,20(%1)\n\t"                                      \
         "lw $a5,24(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5884
/* 7-argument nanomips no-redirect call: target in $t9, args in
   $a0..$a6, result read back from $a0 into 'lval'.  */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         "lw $a4,20(%1)\n\t"                                      \
         "lw $a5,24(%1)\n\t"                                      \
         "lw $a6,28(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5916
/* 8-argument nanomips no-redirect call: target in $t9, args in
   $a0..$a7 (all in registers), result read back from $a0.  */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         "lw $a4,20(%1)\n\t"                                      \
         "lw $a5,24(%1)\n\t"                                      \
         "lw $a6,28(%1)\n\t"                                      \
         "lw $a7,32(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5950
/* 9-argument nanomips no-redirect call: args 1..8 in $a0..$a7, arg9
   copied (via $t9 as scratch) into a 16-byte stack area at 0($sp);
   $t9 is then reloaded with the target address before the call.
   Result is read back from $a0 before the stack is released.  */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1)       \n\t"                               \
         "sw $t9, 0($sp)      \n\t"                               \
         "lw $t9, 0(%1)       \n\t"                               \
         "lw $a0, 4(%1)       \n\t"                               \
         "lw $a1, 8(%1)       \n\t"                               \
         "lw $a2,12(%1)       \n\t"                               \
         "lw $a3,16(%1)       \n\t"                               \
         "lw $a4,20(%1)       \n\t"                               \
         "lw $a5,24(%1)       \n\t"                               \
         "lw $a6,28(%1)       \n\t"                               \
         "lw $a7,32(%1)       \n\t"                               \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0        \n\t"                               \
         "addiu $sp, $sp, 16  \n\t"                               \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5989
/* 10-argument nanomips no-redirect call: args 1..8 in $a0..$a7,
   args 9..10 copied (via $t9 as scratch) into a 16-byte stack area
   at 0/4($sp); $t9 is then reloaded with the target address.
   Result is read back from $a0 before the stack is released.  */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1)       \n\t"                               \
         "sw $t9, 0($sp)      \n\t"                               \
         "lw $t9,40(%1)       \n\t"                               \
         "sw $t9, 4($sp)      \n\t"                               \
         "lw $t9, 0(%1)       \n\t"                               \
         "lw $a0, 4(%1)       \n\t"                               \
         "lw $a1, 8(%1)       \n\t"                               \
         "lw $a2,12(%1)       \n\t"                               \
         "lw $a3,16(%1)       \n\t"                               \
         "lw $a4,20(%1)       \n\t"                               \
         "lw $a5,24(%1)       \n\t"                               \
         "lw $a6,28(%1)       \n\t"                               \
         "lw $a7,32(%1)       \n\t"                               \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0        \n\t"                               \
         "addiu $sp, $sp, 16  \n\t"                               \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6031
/* 11-argument nanomips no-redirect call: args 1..8 in $a0..$a7,
   args 9..11 copied (via $t9 as scratch) into a 16-byte stack area
   at 0/4/8($sp); $t9 is then reloaded with the target address.
   Result is read back from $a0 before the stack is released.  */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1)       \n\t"                               \
         "sw $t9, 0($sp)      \n\t"                               \
         "lw $t9,40(%1)       \n\t"                               \
         "sw $t9, 4($sp)      \n\t"                               \
         "lw $t9,44(%1)       \n\t"                               \
         "sw $t9, 8($sp)      \n\t"                               \
         "lw $t9, 0(%1)       \n\t"                               \
         "lw $a0, 4(%1)       \n\t"                               \
         "lw $a1, 8(%1)       \n\t"                               \
         "lw $a2,12(%1)       \n\t"                               \
         "lw $a3,16(%1)       \n\t"                               \
         "lw $a4,20(%1)       \n\t"                               \
         "lw $a5,24(%1)       \n\t"                               \
         "lw $a6,28(%1)       \n\t"                               \
         "lw $a7,32(%1)       \n\t"                               \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0        \n\t"                               \
         "addiu $sp, $sp, 16  \n\t"                               \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6077
/* 12-argument nanomips no-redirect call: args 1..8 in $a0..$a7,
   args 9..12 copied (via $t9 as scratch) into a 16-byte stack area
   at 0/4/8/12($sp); $t9 is then reloaded with the target address.
   Result is read back from $a0 before the stack is released.  */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1)       \n\t"                               \
         "sw $t9, 0($sp)      \n\t"                               \
         "lw $t9,40(%1)       \n\t"                               \
         "sw $t9, 4($sp)      \n\t"                               \
         "lw $t9,44(%1)       \n\t"                               \
         "sw $t9, 8($sp)      \n\t"                               \
         "lw $t9,48(%1)       \n\t"                               \
         "sw $t9,12($sp)      \n\t"                               \
         "lw $t9, 0(%1)       \n\t"                               \
         "lw $a0, 4(%1)       \n\t"                               \
         "lw $a1, 8(%1)       \n\t"                               \
         "lw $a2,12(%1)       \n\t"                               \
         "lw $a3,16(%1)       \n\t"                               \
         "lw $a4,20(%1)       \n\t"                               \
         "lw $a5,24(%1)       \n\t"                               \
         "lw $a6,28(%1)       \n\t"                               \
         "lw $a7,32(%1)       \n\t"                               \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0        \n\t"                               \
         "addiu $sp, $sp, 16  \n\t"                               \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6126
6127#endif /* PLAT_nanomips_linux */
6128
6129/* ------------------------- mips64-linux ------------------------- */
6130
6131#if defined(PLAT_mips64_linux)
6132
/* These regs are trashed by the hidden call.  Numeric names are
   used here: $2/$3 (v0/v1), $4..$11 (a0..a7), $12..$15 and $24
   (temporaries), $25 (t9) and $31 (ra); listing them makes the
   compiler spill anything live in them across the asm blocks.  */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",         \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
6137
6138/* These CALL_FN_ macros assume that on mips64-linux,
6139 sizeof(long long) == 8. */
6140
/* Widen an argument into the 64-bit register image stored in
   _argvec[]: the inner (long) cast sign-extends narrower values
   before the final (long long) conversion.  */
#define MIPS64_LONG2REG_CAST(x) ((long long)(long)x)
6142
/* Call the zero-argument function described by 'orig' without
   Valgrind redirection (mips64): target loaded from _argvec[0] into
   $25 (t9), result read from $2 (v0).
   NOTE(review): this macro uses the "0" input constraint while the
   sibling mips64 macros below use "r"; both are functionally
   equivalent here since %1 is only read before %0 is written.  */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[1];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      __asm__ volatile(                                           \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6159
/* 1-argument mips64 no-redirect call: arg1 in $4 (a0), target in
   $25 (t9), result read from $2 (v0).  Arguments travel through
   _argvec[] as 64-bit slots, loaded with 'ld'.  */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[2];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"   /* arg1*/                           \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6178
/* 2-argument mips64 no-redirect call: args in $4/$5 (a0/a1), target
   in $25 (t9), result read from $2 (v0).
   Fix: _argvec[0] now uses MIPS64_LONG2REG_CAST like every sibling
   macro in this section, making the widening of nraddr into the
   64-bit slot explicit and consistent (previously a bare
   assignment).  */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[3];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6199
6200
/* 3-argument mips64 no-redirect call: args in $4..$6 (a0..a2),
   target in $25 (t9), result read from $2 (v0).
   Fix: _argvec[0] now uses MIPS64_LONG2REG_CAST like every sibling
   macro in this section, making the widening of nraddr into the
   64-bit slot explicit and consistent (previously a bare
   assignment).  */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[4];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6223
/* 4-argument mips64 no-redirect call: args in $4..$7 (a0..a3),
   target in $25 (t9), result read from $2 (v0).  */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[5];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6248
/* 5-argument mips64 no-redirect call: args in $4..$8 (a0..a4),
   target in $25 (t9), result read from $2 (v0).  */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[6];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6275
/* 6-argument mips64 no-redirect call: args in $4..$9 (a0..a5),
   target in $25 (t9), result read from $2 (v0).  */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[7];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6304
/* 7-argument mips64 no-redirect call: args in $4..$10 (a0..a6),
   target in $25 (t9), result read from $2 (v0).  */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[8];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6336
/* 8-argument mips64 no-redirect call: args in $4..$11 (a0..a7, all
   in registers), target in $25 (t9), result read from $2 (v0).  */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[9];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6370
/* 9-argument mips64 no-redirect call: args 1..8 in $4..$11, arg9
   copied (via $4 as scratch, before the real a0 is loaded) into an
   8-byte stack area at 0($29); target in $25 (t9), result read from
   $2 (v0) after the stack is released.  */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[10];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 8\n\t"                                  \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 8\n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6409
/* 10-argument mips64 no-redirect call: args 1..8 in $4..$11,
   args 9..10 copied via $4 into a 16-byte stack area at 0/8($29);
   target in $25 (t9), result read from $2 (v0).  */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[11];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                  \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 16\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"                                      \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 16\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6451
/* 11-argument mips64 no-redirect call: args 1..8 in $4..$11,
   args 9..11 copied via $4 into a 24-byte stack area at
   0/8/16($29); target in $25 (t9), result read from $2 (v0).  */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[12];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                  \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);                  \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 24\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"                                      \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 88(%1)\n\t"                                      \
         "sd $4, 16($29)\n\t"                                     \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 24\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6497
/* 12-argument mips64 no-redirect call: args 1..8 in $4..$11,
   args 9..12 copied via $4 into a 32-byte stack area at
   0/8/16/24($29); target in $25 (t9), result read from $2 (v0).  */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[13];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                  \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);                  \
      _argvec[12] = MIPS64_LONG2REG_CAST(arg12);                  \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 32\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"                                      \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 88(%1)\n\t"                                      \
         "sd $4, 16($29)\n\t"                                     \
         "ld $4, 96(%1)\n\t"                                      \
         "sd $4, 24($29)\n\t"                                     \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 32\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6546
6547#endif /* PLAT_mips64_linux */
6548
6549/* ------------------------------------------------------------------ */
6550/* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6551/* */
6552/* ------------------------------------------------------------------ */
6553
6554/* Some request codes. There are many more of these, but most are not
6555 exposed to end-user view. These are the public ones, all of the
6556 form 0x1000 + small_number.
6557
6558 Core ones are in the range 0x00000000--0x0000ffff. The non-public
6559 ones start at 0x2000.
6560*/
6561
/* These macros are used by tools -- they must be public, but don't
   embed them into other programs. */
/* Build a tool's client-request base code from its two-character tool
   ID: 'a' and 'b' land in bits 31..24 and 23..16 respectively.  Core
   requests (below) stay under 0x2000 and so never collide. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
/* True iff request code 'v' belongs to the tool identified by
   ('a','b'): only the top 16 bits (the tool ID) are compared. */
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
6568
/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
   This enum comprises an ABI exported by Valgrind to programs
   which use client requests.  DO NOT CHANGE THE NUMERIC VALUES OF THESE
   ENTRIES, NOR DELETE ANY -- add new ones at the end of the most
   relevant group. */
typedef
   enum { /* Query how many Valgrinds this code runs under (0 = none). */
          VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          /* Invalidate translations of a client address range. */
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a
             monitor command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* Allows the client program to change a dynamic command line
             option. */
          VG_USERREQ__CLO_CHANGE = 0x1203,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
          /* 0x130b: out of numeric sequence because it was added to
             the ABI after MEMPOOL_EXISTS. */
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK   = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL   = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL  = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC    = 0x1305,
          VG_USERREQ__MEMPOOL_FREE     = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM     = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL     = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE   = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS   = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Some requests used for Valgrind internal, such as
             self-test or self-hosting. */
          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
          /* Used by Inner Valgrind to inform Outer Valgrind where to
             find the list of inner guest threads */
          VG_USERREQ__INNER_THREADS    = 0x1902
   } Vg_ClientRequest;
6655
/* Non-GNU compilers don't know the GNU '__extension__' keyword used
   below; make it expand to nothing for them. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
6659
6660
/* Returns the number of Valgrinds this code is running under.  That
   is, 0 if running natively, 1 if running under Valgrind, 2 if
   running under Valgrind which is running under another Valgrind,
   etc. */
/* Note: the stray trailing line-continuation backslash that used to
   follow the last macro line has been removed; it silently extended
   the macro onto the following (blank) line, so any code later placed
   there would have been absorbed into the macro. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)

6670
/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
   _qzz_len - 1].  Useful if you are debugging a JITter or some such,
   since it provides a way to make sure valgrind will retranslate the
   invalidated area.  Returns no value. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)         \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
                                   _qzz_addr, _qzz_len, 0, 0, 0)

/* Tell an Outer Valgrind (one running this code under itself) where
   the Inner Valgrind keeps its list of inner guest threads; see the
   VG_USERREQ__INNER_THREADS entry above.  No-op otherwise. */
#define VALGRIND_INNER_THREADS(_qzz_addr)                         \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS,     \
                                   _qzz_addr, 0, 0, 0, 0)
6682
6683
6684/* These requests are for getting Valgrind itself to print something.
6685 Possibly with a backtrace. This is a really ugly hack. The return value
6686 is the number of characters printed, excluding the "**<pid>** " part at the
6687 start and the backtrace (if present). */
6688
6689#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6690/* Modern GCC will optimize the static routine out if unused,
6691 and unused attribute will shut down warnings about it. */
6692static int VALGRIND_PRINTF(const char *format, ...)
6693 __attribute__((format(__printf__, 1, 2), __unused__));
6694#endif
6695static int
6696#if defined(_MSC_VER)
6697__inline
6698#endif
6699VALGRIND_PRINTF(const char *format, ...)
6700{
6701#if !IS_ENABLED(CONFIG_VALGRIND)
6702 (void)format;
6703 return 0;
6704#else /* CONFIG_VALGRIND */
6705#if defined(_MSC_VER) || defined(__MINGW64__)
6706 uintptr_t _qzz_res;
6707#else
6708 unsigned long _qzz_res;
6709#endif
6710 va_list vargs;
6711 va_start(vargs, format);
6712#if defined(_MSC_VER) || defined(__MINGW64__)
6713 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6714 VG_USERREQ__PRINTF_VALIST_BY_REF,
6715 (uintptr_t)format,
6716 (uintptr_t)&vargs,
6717 0, 0, 0);
6718#else
6719 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6720 VG_USERREQ__PRINTF_VALIST_BY_REF,
6721 (unsigned long)format,
6722 (unsigned long)&vargs,
6723 0, 0, 0);
6724#endif
6725 va_end(vargs);
6726 return (int)_qzz_res;
6727#endif /* CONFIG_VALGRIND */
6728}
6729
6730#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6731static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6732 __attribute__((format(__printf__, 1, 2), __unused__));
6733#endif
6734static int
6735#if defined(_MSC_VER)
6736__inline
6737#endif
6738VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6739{
6740#if !IS_ENABLED(CONFIG_VALGRIND)
6741 (void)format;
6742 return 0;
6743#else /* CONFIG_VALGRIND */
6744#if defined(_MSC_VER) || defined(__MINGW64__)
6745 uintptr_t _qzz_res;
6746#else
6747 unsigned long _qzz_res;
6748#endif
6749 va_list vargs;
6750 va_start(vargs, format);
6751#if defined(_MSC_VER) || defined(__MINGW64__)
6752 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6753 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6754 (uintptr_t)format,
6755 (uintptr_t)&vargs,
6756 0, 0, 0);
6757#else
6758 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6759 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6760 (unsigned long)format,
6761 (unsigned long)&vargs,
6762 0, 0, 0);
6763#endif
6764 va_end(vargs);
6765 return (int)_qzz_res;
6766#endif /* CONFIG_VALGRIND */
6767}
6768
6769
6770/* These requests allow control to move from the simulated CPU to the
6771 real CPU, calling an arbitrary function.
6772
6773 Note that the current ThreadId is inserted as the first argument.
6774 So this call:
6775
6776 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
6777
6778 requires f to have this signature:
6779
6780 Word f(Word tid, Word arg1, Word arg2)
6781
6782 where "Word" is a word-sized type.
6783
6784 Note that these client requests are not entirely reliable. For example,
6785 if you call a function with them that subsequently calls printf(),
6786 there's a high chance Valgrind will crash. Generally, your prospects of
6787 these working are made higher if the called function does not refer to
6788 any global variables, and does not refer to any libc or other functions
6789 (printf et al). Any kind of entanglement with libc or dynamic linking is
6790 likely to have a bad outcome, for tricky reasons which we've grappled
6791 with a lot in the past.
6792*/
/* Run _qyy_fn on the real CPU.  As described in the comment above,
   Valgrind inserts the running thread's ThreadId as an implicit first
   argument, so the callee takes one more argument than the macro name
   suggests.  Each macro evaluates to the callee's result, or to 0
   when not running under Valgrind. */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL3,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2,         \
                                    _qyy_arg3, 0)
6817
6818
/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted.  Evaluates to 0 when
   not running under Valgrind. */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
6827
6828/* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
6829 when heap blocks are allocated in order to give accurate results. This
6830 happens automatically for the standard allocator functions such as
6831 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
6832 delete[], etc.
6833
6834 But if your program uses a custom allocator, this doesn't automatically
6835 happen, and Valgrind will not do as well. For example, if you allocate
   superblocks with mmap() and then allocate chunks of the superblocks, all
6837 Valgrind's observations will be at the mmap() level and it won't know that
6838 the chunks should be considered separate entities. In Memcheck's case,
6839 that means you probably won't get heap block overrun detection (because
6840 there won't be redzones marked as unaddressable) and you definitely won't
6841 get any leak detection.
6842
6843 The following client requests allow a custom allocator to be annotated so
6844 that it can be handled accurately by Valgrind.
6845
6846 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
6847 by a malloc()-like function. For Memcheck (an illustrative case), this
6848 does two things:
6849
6850 - It records that the block has been allocated. This means any addresses
6851 within the block mentioned in error messages will be
6852 identified as belonging to the block. It also means that if the block
6853 isn't freed it will be detected by the leak checker.
6854
6855 - It marks the block as being addressable and undefined (if 'is_zeroed' is
6856 not set), or addressable and defined (if 'is_zeroed' is set). This
6857 controls how accesses to the block by the program are handled.
6858
6859 'addr' is the start of the usable block (ie. after any
6860 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
6861 can apply redzones -- these are blocks of padding at the start and end of
6862 each block. Adding redzones is recommended as it makes it much more likely
6863 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
6864 zeroed (or filled with another predictable value), as is the case for
6865 calloc().
6866
6867 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
6868 heap block -- that will be used by the client program -- is allocated.
6869 It's best to put it at the outermost level of the allocator if possible;
6870 for example, if you have a function my_alloc() which calls
6871 internal_alloc(), and the client request is put inside internal_alloc(),
6872 stack traces relating to the heap block will contain entries for both
6873 my_alloc() and internal_alloc(), which is probably not what you want.
6874
6875 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
6876 custom blocks from within a heap block, B, that has been allocated with
6877 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
6878 -- the custom blocks will take precedence.
6879
6880 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
6881 Memcheck, it does two things:
6882
6883 - It records that the block has been deallocated. This assumes that the
6884 block was annotated as having been allocated via
6885 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6886
6887 - It marks the block as being unaddressable.
6888
6889 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
6890 heap block is deallocated.
6891
6892 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
6893 Memcheck, it does four things:
6894
6895 - It records that the size of a block has been changed. This assumes that
6896 the block was annotated as having been allocated via
6897 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6898
6899 - If the block shrunk, it marks the freed memory as being unaddressable.
6900
6901 - If the block grew, it marks the new area as undefined and defines a red
6902 zone past the end of the new block.
6903
6904 - The V-bits of the overlap between the old and the new block are preserved.
6905
6906 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
6907 and before deallocation of the old block.
6908
6909 In many cases, these three client requests will not be enough to get your
6910 allocator working well with Memcheck. More specifically, if your allocator
6911 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
6912 will be necessary to mark the memory as addressable just before the zeroing
6913 occurs, otherwise you'll get a lot of invalid write errors. For example,
6914 you'll need to do this if your allocator recycles freed blocks, but it
6915 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
6916 Alternatively, if your allocator reuses freed blocks for allocator-internal
6917 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
6918
6919 Really, what's happening is a blurring of the lines between the client
6920 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
6921 memory should be considered unaddressable to the client program, but the
6922 allocator knows more than the rest of the client program and so may be able
6923 to safely access it. Extra client requests are necessary for Valgrind to
6924 understand the distinction between the allocator and the rest of the
6925 program.
6926
6927 Ignored if addr == 0.
6928*/
/* Mark [addr, addr+sizeB) as a heap block handed out by a
   malloc()-like allocator; see the long comment above for the full
   semantics of rzB and is_zeroed.  Ignored if addr == 0. */
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)    \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK,  \
                                   addr, sizeB, rzB, is_zeroed, 0)

/* Inform tools that the block at addr was resized in place from
   oldSizeB to newSizeB.  See the comment for
   VALGRIND_MALLOCLIKE_BLOCK for details.  Ignored if addr == 0. */
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
                                   addr, oldSizeB, newSizeB, rzB, 0)

/* Mark the block at addr (previously annotated with
   VALGRIND_MALLOCLIKE_BLOCK) as freed.  See the comment for
   VALGRIND_MALLOCLIKE_BLOCK for details.  Ignored if addr == 0. */
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                        \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,    \
                                   addr, rzB, 0, 0, 0)
6946
/* Create a memory pool anchored at 'pool'.  rzB is the redzone size
   applied to pieces later associated via VALGRIND_MEMPOOL_ALLOC;
   is_zeroed indicates whether those pieces start out zeroed (cf. the
   is_zeroed description for VALGRIND_MALLOCLIKE_BLOCK above). */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)             \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,    \
                                   pool, rzB, is_zeroed, 0, 0)
6951
6952/* Create a memory pool with some flags specifying extended behaviour.
6953 When flags is zero, the behaviour is identical to VALGRIND_CREATE_MEMPOOL.
6954
6955 The flag VALGRIND_MEMPOOL_METAPOOL specifies that the pieces of memory
6956 associated with the pool using VALGRIND_MEMPOOL_ALLOC will be used
6957 by the application as superblocks to dole out MALLOC_LIKE blocks using
   VALGRIND_MALLOCLIKE_BLOCK. In other words, a meta pool is a "two-level"
   pool: the first level is the blocks described by VALGRIND_MEMPOOL_ALLOC.
6960 The second level blocks are described using VALGRIND_MALLOCLIKE_BLOCK.
6961 Note that the association between the pool and the second level blocks
6962 is implicit : second level blocks will be located inside first level
6963 blocks. It is necessary to use the VALGRIND_MEMPOOL_METAPOOL flag
6964 for such 2 levels pools, as otherwise valgrind will detect overlapping
6965 memory blocks, and will abort execution (e.g. during leak search).
6966
6967 Such a meta pool can also be marked as an 'auto free' pool using the flag
6968 VALGRIND_MEMPOOL_AUTO_FREE, which must be OR-ed together with the
6969 VALGRIND_MEMPOOL_METAPOOL. For an 'auto free' pool, VALGRIND_MEMPOOL_FREE
6970 will automatically free the second level blocks that are contained
6971 inside the first level block freed with VALGRIND_MEMPOOL_FREE.
6972 In other words, calling VALGRIND_MEMPOOL_FREE will cause implicit calls
6973 to VALGRIND_FREELIKE_BLOCK for all the second level blocks included
6974 in the first level block.
6975 Note: it is an error to use the VALGRIND_MEMPOOL_AUTO_FREE flag
6976 without the VALGRIND_MEMPOOL_METAPOOL flag.
6977*/
/* Flags for VALGRIND_CREATE_MEMPOOL_EXT; see the comment above.
   AUTO_FREE is only valid when OR-ed with METAPOOL. */
#define VALGRIND_MEMPOOL_AUTO_FREE 1
#define VALGRIND_MEMPOOL_METAPOOL 2
/* Create a memory pool with extended-behaviour flags; flags == 0 is
   identical to VALGRIND_CREATE_MEMPOOL. */
#define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags)  \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,    \
                                   pool, rzB, is_zeroed, flags, 0)
6983
/* Destroy the memory pool anchored at 'pool'. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,   \
                                   pool, 0, 0, 0, 0)

/* Associate the piece [addr, addr+size) with the memory pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,     \
                                   pool, addr, size, 0, 0)

/* Disassociate the piece starting at addr from the memory pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,      \
                                   pool, addr, 0, 0, 0)
6998
/* Disassociate any pieces lying outside [addr, addr+size). */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,      \
                                   pool, addr, size, 0, 0)

/* Tell tools that the pool previously anchored at address poolA has
   moved to poolB.  (The comment formerly here, "Resize and/or move a
   piece...", was a copy-paste of the VALGRIND_MEMPOOL_CHANGE one.) */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,      \
                                   poolA, poolB, 0, 0, 0)

/* Resize and/or move a piece associated with a memory pool: the piece
   previously at addrA is now at addrB with length size. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,    \
                                   pool, addrA, addrB, size, 0)

/* Return 1 if a mempool exists, else 0 (also 0 when not running
   under Valgrind). */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MEMPOOL_EXISTS,        \
                               pool, 0, 0, 0, 0)
7019
/* Mark a piece of memory as being a stack.  Returns a stack id for
   later use with DEREGISTER/CHANGE (0 when not under Valgrind).
   start is the lowest addressable stack byte, end is the highest
   addressable stack byte. */
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__STACK_REGISTER,        \
                               start, end, 0, 0, 0)

/* Unmark the piece of memory associated with a stack id (as returned
   by VALGRIND_STACK_REGISTER) as being a stack. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER,  \
                                   id, 0, 0, 0, 0)

/* Change the start and end address of the stack id.
   start is the new lowest addressable stack byte, end is the new
   highest addressable stack byte. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,      \
                                   id, start, end, 0, 0)
7040
/* Load PDB debug info for Wine PE image_map.  fd refers to the PDB
   file; the mapped image is at ptr with length total_size and load
   bias delta. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)   \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO,\
                                   fd, ptr, total_size, delta, 0)

/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,      \
                               addr, buf64, 0, 0, 0)
7054
/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled. */
/* Sends +1 as the delta to this thread's error-disablement counter;
   see VG_USERREQ__CHANGE_ERR_DISABLEMENT above. */
#define VALGRIND_DISABLE_ERROR_REPORTING                          \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                   1, 0, 0, 0, 0)

/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING (sends -1 as the delta). */
#define VALGRIND_ENABLE_ERROR_REPORTING                           \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                   -1, 0, 0, 0, 0)
7072
/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise. */
#define VALGRIND_MONITOR_COMMAND(command)                         \
   VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
                                   command, 0, 0, 0, 0)


/* Change the value of a dynamic command line option.
   Note that unknown or not dynamically changeable options
   will cause a warning message to be output. */
#define VALGRIND_CLO_CHANGE(option)                               \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CLO_CHANGE,        \
                                   option, 0, 0, 0, 0)
7089
7090
/* The PLAT_* selector macros are internal to this header; undefine
   them all so they do not leak into code that includes it. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_nanomips_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris
7107
7108#endif /* __VALGRIND_H */