A Discrete-Event Network Simulator
API
valgrind.h
Go to the documentation of this file.
1 /* -*- c -*-
2  ----------------------------------------------------------------
3 
4  Notice that the following BSD-style license applies to this one
5  file (valgrind.h) only. The rest of Valgrind is licensed under the
6  terms of the GNU General Public License, version 2, unless
7  otherwise indicated. See the COPYING file in the source
8  distribution for details.
9 
10  ----------------------------------------------------------------
11 
12  This file is part of Valgrind, a dynamic binary instrumentation
13  framework.
14 
15  Copyright (C) 2000-2013 Julian Seward. All rights reserved.
16 
17  Redistribution and use in source and binary forms, with or without
18  modification, are permitted provided that the following conditions
19  are met:
20 
21  1. Redistributions of source code must retain the above copyright
22  notice, this list of conditions and the following disclaimer.
23 
24  2. The origin of this software must not be misrepresented; you must
25  not claim that you wrote the original software. If you use this
26  software in a product, an acknowledgment in the product
27  documentation would be appreciated but is not required.
28 
29  3. Altered source versions must be plainly marked as such, and must
30  not be misrepresented as being the original software.
31 
32  4. The name of the author may not be used to endorse or promote
33  products derived from this software without specific prior written
34  permission.
35 
36  THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37  OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39  ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40  DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41  DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42  GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43  INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44  WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45  NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
47 
48  ----------------------------------------------------------------
49 
50  Notice that the above BSD-style license applies to this one file
51  (valgrind.h) only. The entire rest of Valgrind is licensed under
52  the terms of the GNU General Public License, version 2. See the
53  COPYING file in the source distribution for details.
54 
55  ----------------------------------------------------------------
56 */
57 
58 // NOLINTBEGIN
59 // clang-format off
60 
61 /* This file is for inclusion into client (your!) code.
62 
63  You can use these macros to manipulate and query Valgrind's
64  execution inside your own programs.
65 
66  The resulting executables will still run without Valgrind, just a
67  little bit more slowly than they otherwise would, but otherwise
68  unchanged. When not running on valgrind, each client request
69  consumes very few (eg. 7) instructions, so the resulting performance
70  loss is negligible unless you plan to execute client requests
71  millions of times per second. Nevertheless, if that is still a
72  problem, you can compile with the NVALGRIND symbol defined (gcc
73  -DNVALGRIND) so that client requests are not even compiled in. */
74 
75 #ifndef __VALGRIND_H
76 #define __VALGRIND_H
77 
78 
79 /* ------------------------------------------------------------------ */
80 /* VERSION NUMBER OF VALGRIND */
81 /* ------------------------------------------------------------------ */
82 
83 /* Specify Valgrind's version number, so that user code can
84  conditionally compile based on our version number. Note that these
85  were introduced at version 3.6 and so do not exist in version 3.5
86  or earlier. The recommended way to use them to check for "version
87  X.Y or later" is (eg)
88 
89 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
90  && (__VALGRIND_MAJOR__ > 3 \
91  || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
92 */
/* Version of Valgrind this header was shipped with; lets client code
 conditionally compile against it (usage example shown above). */
93 #define __VALGRIND_MAJOR__ 3
94 #define __VALGRIND_MINOR__ 9
95 
96 
97 #include <stdarg.h>
98 
99 /* Nb: this file might be included in a file compiled with -ansi. So
100  we can't use C++ style "//" comments nor the "asm" keyword (instead
101  use "__asm__"). */
102 
103 /* Derive some tags indicating what the target platform is. Note
104  that in this file we're using the compiler's CPP symbols for
105  identifying architectures, which are different to the ones we use
106  within the rest of Valgrind. Note, __powerpc__ is active for both
107  32 and 64-bit PPC, whereas __powerpc64__ is only active for the
108  latter (on Linux, that is).
109 
110  Misc note: how to find out what's predefined in gcc by default:
111  gcc -Wp,-dM somefile.c
112 */
/* Clear any stale PLAT_* tags so that at most one of them is active
 after the #if chain below. Detection relies purely on the compiler's
 predefined macros (see the note above on `gcc -Wp,-dM`). */
113 #undef PLAT_x86_darwin
114 #undef PLAT_amd64_darwin
115 #undef PLAT_x86_win32
116 #undef PLAT_amd64_win64
117 #undef PLAT_x86_linux
118 #undef PLAT_amd64_linux
119 #undef PLAT_ppc32_linux
120 #undef PLAT_ppc64_linux
121 #undef PLAT_arm_linux
122 #undef PLAT_arm64_linux
123 #undef PLAT_s390x_linux
124 #undef PLAT_mips32_linux
125 #undef PLAT_mips64_linux
126 
127 
128 #if defined(__APPLE__) && defined(__i386__)
129 # define PLAT_x86_darwin 1
130 #elif defined(__APPLE__) && defined(__x86_64__)
131 # define PLAT_amd64_darwin 1
132 #elif defined(__MINGW32__) || defined(__CYGWIN32__) \
133  || (defined(_WIN32) && defined(_M_IX86))
134 # define PLAT_x86_win32 1
135 #elif defined(__MINGW64__) || (defined(_WIN64) && defined(_M_X64))
136 # define PLAT_amd64_win64 1
137 #elif defined(__linux__) && defined(__i386__)
138 # define PLAT_x86_linux 1
139 #elif defined(__linux__) && defined(__x86_64__)
140 # define PLAT_amd64_linux 1
141 #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
142 # define PLAT_ppc32_linux 1
143 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
144 # define PLAT_ppc64_linux 1
145 #elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
146 # define PLAT_arm_linux 1
147 #elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
148 # define PLAT_arm64_linux 1
149 #elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
150 # define PLAT_s390x_linux 1
151 #elif defined(__linux__) && defined(__mips__) && (__mips==64)
152 # define PLAT_mips64_linux 1
153 #elif defined(__linux__) && defined(__mips__) && (__mips!=64)
154 # define PLAT_mips32_linux 1
155 #else
156 /* If we're not compiling for our target platform, don't generate
157  any inline asms. */
158 # if !defined(NVALGRIND)
159 # define NVALGRIND 1
160 # endif
161 #endif
162 
163 
164 /* ------------------------------------------------------------------ */
165 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
166 /* in here of use to end-users -- skip to the next section. */
167 /* ------------------------------------------------------------------ */
168 
169 /*
170  * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
171  * request. Accepts both pointers and integers as arguments.
172  *
173  * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
174  * client request that does not return a value.
175  *
176  * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
177  * client request and whose value equals the client request result. Accepts
178  * both pointers and integers as arguments. Note that such calls are not
179  * necessarily pure functions -- they may have side effects.
180  */
181 
/* Statement form: evaluates the request and assigns its result to the
 lvalue _zzq_rlval. Thin wrapper over the platform-specific
 VALGRIND_DO_CLIENT_REQUEST_EXPR defined later in this file. */
182 #define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, \
183  _zzq_request, _zzq_arg1, _zzq_arg2, \
184  _zzq_arg3, _zzq_arg4, _zzq_arg5) \
185  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default), \
186  (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
187  (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
188 
/* Statement form that discards the request's result (default value 0). */
189 #define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1, \
190  _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
191  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
192  (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
193  (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
194 
195 #if defined(NVALGRIND)
196 
197 /* Define NVALGRIND to completely remove the Valgrind magic sequence
198  from the compiled code (analogous to NDEBUG's effects on
199  assert()) */
/* With NVALGRIND the request collapses to the default value: no asm at all. */
200 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
201  _zzq_default, _zzq_request, \
202  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
203  (_zzq_default)
204 
204 
205 #else /* ! NVALGRIND */
206 
207 /* The following defines the magic code sequences which the JITter
208  spots and handles magically. Don't look too closely at them as
209  they will rot your brain.
210 
211  The assembly code sequences for all architectures is in this one
212  file. This is because this file must be stand-alone, and we don't
213  want to have multiple files.
214 
215  For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
216  value gets put in the return slot, so that everything works when
217  this is executed not under Valgrind. Args are passed in a memory
218  block, and so there's no intrinsic limit to the number that could
219  be passed, but it's currently five.
220 
221  The macro args are:
222  _zzq_rlval result lvalue
223  _zzq_default default value (result returned when running on real CPU)
224  _zzq_request request code
225  _zzq_arg1..5 request params
226 
227  The other two macros are used to support function wrapping, and are
228  a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
229  guest's NRADDR pseudo-register and whatever other information is
230  needed to safely run the call original from the wrapper: on
231  ppc64-linux, the R2 value at the divert point is also needed. This
232  information is abstracted into a user-visible type, OrigFn.
233 
234  VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
235  guest, but guarantees that the branch instruction will not be
236  redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
237  branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
238  complete inline asm, since it needs to be combined with more magic
239  inline asm stuff to be useful.
240 */
241 
242 /* ------------------------- x86-{linux,darwin} ---------------- */
243 
244 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
245  || (defined(PLAT_x86_win32) && defined(__GNUC__))
246 
247 typedef
248  struct {
249  unsigned int nraddr; /* where's the code? */
250  }
251  OrigFn;
252 
/* Four left-rotates of %edi totalling 3+13+29+19 = 64 bits, i.e. a
 net no-op on a real CPU (64 mod 32 == 0) but recognized by Valgrind's
 JIT as the magic marker. Must remain byte-exact; do not reformat. */
253 #define __SPECIAL_INSTRUCTION_PREAMBLE \
254  "roll $3, %%edi ; roll $13, %%edi\n\t" \
255  "roll $29, %%edi ; roll $19, %%edi\n\t"
256 
/* Passes the request code + 5 args via a 6-word block whose address is
 in %eax; default goes in (and result comes back in) %edx, so the
 expression yields _zzq_default when not running under Valgrind. */
257 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
258  _zzq_default, _zzq_request, \
259  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
260  __extension__ \
261  ({volatile unsigned int _zzq_args[6]; \
262  volatile unsigned int _zzq_result; \
263  _zzq_args[0] = (unsigned int)(_zzq_request); \
264  _zzq_args[1] = (unsigned int)(_zzq_arg1); \
265  _zzq_args[2] = (unsigned int)(_zzq_arg2); \
266  _zzq_args[3] = (unsigned int)(_zzq_arg3); \
267  _zzq_args[4] = (unsigned int)(_zzq_arg4); \
268  _zzq_args[5] = (unsigned int)(_zzq_arg5); \
269  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
270  /* %EDX = client_request ( %EAX ) */ \
271  "xchgl %%ebx,%%ebx" \
272  : "=d" (_zzq_result) \
273  : "a" (&_zzq_args[0]), "0" (_zzq_default) \
274  : "cc", "memory" \
275  ); \
276  _zzq_result; \
277  })
278 
279 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
280  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
281  volatile unsigned int __addr; \
282  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
283  /* %EAX = guest_NRADDR */ \
284  "xchgl %%ecx,%%ecx" \
285  : "=a" (__addr) \
286  : \
287  : "cc", "memory" \
288  ); \
289  _zzq_orig->nraddr = __addr; \
290  }
291 
/* Text fragment only (not complete asm): combined with more asm by the
 function-wrapping macros further down the file. */
292 #define VALGRIND_CALL_NOREDIR_EAX \
293  __SPECIAL_INSTRUCTION_PREAMBLE \
294  /* call-noredir *%EAX */ \
295  "xchgl %%edx,%%edx\n\t"
296 
297 #define VALGRIND_VEX_INJECT_IR() \
298  do { \
299  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
300  "xchgl %%edi,%%edi\n\t" \
301  : : : "cc", "memory" \
302  ); \
303  } while (0)
304 
305 #endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
306 
307 /* ------------------------- x86-Win32 ------------------------- */
308 
309 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
310 
311 typedef
312  struct {
313  unsigned int nraddr; /* where's the code? */
314  }
315  OrigFn;
316 
317 #if defined(_MSC_VER)
318 
/* MSVC inline-asm flavour of the x86 magic preamble (same rotate
 sequence as the gcc version above; must remain byte-exact). */
319 #define __SPECIAL_INSTRUCTION_PREAMBLE \
320  __asm rol edi, 3 __asm rol edi, 13 \
321  __asm rol edi, 29 __asm rol edi, 19
322 
323 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
324  _zzq_default, _zzq_request, \
325  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
326  valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
327  (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
328  (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
329  (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
330 
/* Helper function (MSVC cannot put __asm in an expression macro):
 loads &_zzq_args into eax and the default into edx, executes the magic
 sequence, and reads the result back out of edx — so on a real CPU the
 return value is simply _zzq_default. */
331 static __inline uintptr_t
332 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
333  uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
334  uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
335  uintptr_t _zzq_arg5)
336 {
337  volatile uintptr_t _zzq_args[6];
338  volatile unsigned int _zzq_result;
339  _zzq_args[0] = (uintptr_t)(_zzq_request);
340  _zzq_args[1] = (uintptr_t)(_zzq_arg1);
341  _zzq_args[2] = (uintptr_t)(_zzq_arg2);
342  _zzq_args[3] = (uintptr_t)(_zzq_arg3);
343  _zzq_args[4] = (uintptr_t)(_zzq_arg4);
344  _zzq_args[5] = (uintptr_t)(_zzq_arg5);
345  __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
346  __SPECIAL_INSTRUCTION_PREAMBLE
347  /* %EDX = client_request ( %EAX ) */
348  __asm xchg ebx,ebx
349  __asm mov _zzq_result, edx
350  }
351  return _zzq_result;
352 }
353 
354 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
355  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
356  volatile unsigned int __addr; \
357  __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
358  /* %EAX = guest_NRADDR */ \
359  __asm xchg ecx,ecx \
360  __asm mov __addr, eax \
361  } \
362  _zzq_orig->nraddr = __addr; \
363  }
364 
/* Deliberately expands to the undefined identifier ERROR: no-redirect
 calls are not supported with MSVC, so any use fails to compile. */
365 #define VALGRIND_CALL_NOREDIR_EAX ERROR
366 
367 #define VALGRIND_VEX_INJECT_IR() \
368  do { \
369  __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
370  __asm xchg edi,edi \
371  } \
372  } while (0)
373 
374 #else
375 #error Unsupported compiler.
376 #endif
377 
378 #endif /* PLAT_x86_win32 */
379 
380 /* ------------------------ amd64-{linux,darwin} --------------- */
381 
382 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
383 
384 typedef
385  struct {
386  unsigned long long int nraddr; /* where's the code? */
387  }
388  OrigFn;
389 
/* Left-rotates of %rdi totalling 3+13+61+51 = 128 bits: a net no-op on
 real hardware (128 mod 64 == 0), recognized by Valgrind's JIT as the
 magic marker. Must remain byte-exact. */
390 #define __SPECIAL_INSTRUCTION_PREAMBLE \
391  "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
392  "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
393 
/* Args go through a 6-element block addressed by %rax; default enters
 (and result returns in) %rdx — yields _zzq_default off-Valgrind. */
394 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
395  _zzq_default, _zzq_request, \
396  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
397  __extension__ \
398  ({ volatile unsigned long long int _zzq_args[6]; \
399  volatile unsigned long long int _zzq_result; \
400  _zzq_args[0] = (unsigned long long int)(_zzq_request); \
401  _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
402  _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
403  _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
404  _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
405  _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
406  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
407  /* %RDX = client_request ( %RAX ) */ \
408  "xchgq %%rbx,%%rbx" \
409  : "=d" (_zzq_result) \
410  : "a" (&_zzq_args[0]), "0" (_zzq_default) \
411  : "cc", "memory" \
412  ); \
413  _zzq_result; \
414  })
415 
416 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
417  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
418  volatile unsigned long long int __addr; \
419  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
420  /* %RAX = guest_NRADDR */ \
421  "xchgq %%rcx,%%rcx" \
422  : "=a" (__addr) \
423  : \
424  : "cc", "memory" \
425  ); \
426  _zzq_orig->nraddr = __addr; \
427  }
428 
/* Text fragment only; combined with further asm by the wrapping macros. */
429 #define VALGRIND_CALL_NOREDIR_RAX \
430  __SPECIAL_INSTRUCTION_PREAMBLE \
431  /* call-noredir *%RAX */ \
432  "xchgq %%rdx,%%rdx\n\t"
433 
434 #define VALGRIND_VEX_INJECT_IR() \
435  do { \
436  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
437  "xchgq %%rdi,%%rdi\n\t" \
438  : : : "cc", "memory" \
439  ); \
440  } while (0)
441 
442 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
443 
444 /* ------------------------ ppc32-linux ------------------------ */
445 
446 #if defined(PLAT_ppc32_linux)
447 
448 typedef
449  struct {
450  unsigned int nraddr; /* where's the code? */
451  }
452  OrigFn;
453 
/* Full-width rotates of r0 totalling 3+13+29+19 = 64 bits: a net no-op
 on real hardware (64 mod 32 == 0), recognized by the JIT as the magic
 marker. Must remain byte-exact. */
454 #define __SPECIAL_INSTRUCTION_PREAMBLE \
455  "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
456  "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"
457 
/* Default goes to r3 and the args-block pointer to r4; result is read
 back from r3, so the expression equals _zzq_default off-Valgrind. */
458 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
459  _zzq_default, _zzq_request, \
460  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
461  \
462  __extension__ \
463  ({ unsigned int _zzq_args[6]; \
464  unsigned int _zzq_result; \
465  unsigned int* _zzq_ptr; \
466  _zzq_args[0] = (unsigned int)(_zzq_request); \
467  _zzq_args[1] = (unsigned int)(_zzq_arg1); \
468  _zzq_args[2] = (unsigned int)(_zzq_arg2); \
469  _zzq_args[3] = (unsigned int)(_zzq_arg3); \
470  _zzq_args[4] = (unsigned int)(_zzq_arg4); \
471  _zzq_args[5] = (unsigned int)(_zzq_arg5); \
472  _zzq_ptr = _zzq_args; \
473  __asm__ volatile("mr 3,%1\n\t" /*default*/ \
474  "mr 4,%2\n\t" /*ptr*/ \
475  __SPECIAL_INSTRUCTION_PREAMBLE \
476  /* %R3 = client_request ( %R4 ) */ \
477  "or 1,1,1\n\t" \
478  "mr %0,3" /*result*/ \
479  : "=b" (_zzq_result) \
480  : "b" (_zzq_default), "b" (_zzq_ptr) \
481  : "cc", "memory", "r3", "r4"); \
482  _zzq_result; \
483  })
484 
485 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
486  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
487  unsigned int __addr; \
488  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
489  /* %R3 = guest_NRADDR */ \
490  "or 2,2,2\n\t" \
491  "mr %0,3" \
492  : "=b" (__addr) \
493  : \
494  : "cc", "memory", "r3" \
495  ); \
496  _zzq_orig->nraddr = __addr; \
497  }
498 
/* Text fragment only; combined with further asm by the wrapping macros. */
499 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
500  __SPECIAL_INSTRUCTION_PREAMBLE \
501  /* branch-and-link-to-noredir *%R11 */ \
502  "or 3,3,3\n\t"
503 
504 #define VALGRIND_VEX_INJECT_IR() \
505  do { \
506  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
507  "or 5,5,5\n\t" \
508  ); \
509  } while (0)
510 
511 #endif /* PLAT_ppc32_linux */
512 
513 /* ------------------------ ppc64-linux ------------------------ */
514 
515 #if defined(PLAT_ppc64_linux)
516 
/* ppc64 additionally records R2 (the TOC pointer at the divert point),
 which is needed to safely call the original from a wrapper. */
517 typedef
518  struct {
519  unsigned long long int nraddr; /* where's the code? */
520  unsigned long long int r2; /* what tocptr do we need? */
521  }
522  OrigFn;
523 
/* Rotates of r0 totalling 3+13+61+51 = 128 bits: a net no-op on real
 hardware (128 mod 64 == 0), recognized by the JIT as the magic marker.
 Must remain byte-exact. */
524 #define __SPECIAL_INSTRUCTION_PREAMBLE \
525  "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
526  "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
527 
528 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
529  _zzq_default, _zzq_request, \
530  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
531  \
532  __extension__ \
533  ({ unsigned long long int _zzq_args[6]; \
534  unsigned long long int _zzq_result; \
535  unsigned long long int* _zzq_ptr; \
536  _zzq_args[0] = (unsigned long long int)(_zzq_request); \
537  _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
538  _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
539  _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
540  _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
541  _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
542  _zzq_ptr = _zzq_args; \
543  __asm__ volatile("mr 3,%1\n\t" /*default*/ \
544  "mr 4,%2\n\t" /*ptr*/ \
545  __SPECIAL_INSTRUCTION_PREAMBLE \
546  /* %R3 = client_request ( %R4 ) */ \
547  "or 1,1,1\n\t" \
548  "mr %0,3" /*result*/ \
549  : "=b" (_zzq_result) \
550  : "b" (_zzq_default), "b" (_zzq_ptr) \
551  : "cc", "memory", "r3", "r4"); \
552  _zzq_result; \
553  })
554 
/* Fills in both nraddr and r2 via two separate magic sequences. */
555 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
556  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
557  unsigned long long int __addr; \
558  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
559  /* %R3 = guest_NRADDR */ \
560  "or 2,2,2\n\t" \
561  "mr %0,3" \
562  : "=b" (__addr) \
563  : \
564  : "cc", "memory", "r3" \
565  ); \
566  _zzq_orig->nraddr = __addr; \
567  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
568  /* %R3 = guest_NRADDR_GPR2 */ \
569  "or 4,4,4\n\t" \
570  "mr %0,3" \
571  : "=b" (__addr) \
572  : \
573  : "cc", "memory", "r3" \
574  ); \
575  _zzq_orig->r2 = __addr; \
576  }
577 
/* Text fragment only; combined with further asm by the wrapping macros. */
578 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
579  __SPECIAL_INSTRUCTION_PREAMBLE \
580  /* branch-and-link-to-noredir *%R11 */ \
581  "or 3,3,3\n\t"
582 
583 #define VALGRIND_VEX_INJECT_IR() \
584  do { \
585  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
586  "or 5,5,5\n\t" \
587  ); \
588  } while (0)
589 
590 #endif /* PLAT_ppc64_linux */
591 
592 /* ------------------------- arm-linux ------------------------- */
593 
594 #if defined(PLAT_arm_linux)
595 
596 typedef
597  struct {
598  unsigned int nraddr; /* where's the code? */
599  }
600  OrigFn;
601 
/* Rotates of r12 totalling 3+13+29+19 = 64 bits: a net no-op on real
 hardware (64 mod 32 == 0), recognized by the JIT as the magic marker.
 Must remain byte-exact. */
602 #define __SPECIAL_INSTRUCTION_PREAMBLE \
603  "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
604  "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
605 
/* Default goes to r3, args-block pointer to r4; result read from r3,
 so the expression equals _zzq_default off-Valgrind. */
606 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
607  _zzq_default, _zzq_request, \
608  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
609  \
610  __extension__ \
611  ({volatile unsigned int _zzq_args[6]; \
612  volatile unsigned int _zzq_result; \
613  _zzq_args[0] = (unsigned int)(_zzq_request); \
614  _zzq_args[1] = (unsigned int)(_zzq_arg1); \
615  _zzq_args[2] = (unsigned int)(_zzq_arg2); \
616  _zzq_args[3] = (unsigned int)(_zzq_arg3); \
617  _zzq_args[4] = (unsigned int)(_zzq_arg4); \
618  _zzq_args[5] = (unsigned int)(_zzq_arg5); \
619  __asm__ volatile("mov r3, %1\n\t" /*default*/ \
620  "mov r4, %2\n\t" /*ptr*/ \
621  __SPECIAL_INSTRUCTION_PREAMBLE \
622  /* R3 = client_request ( R4 ) */ \
623  "orr r10, r10, r10\n\t" \
624  "mov %0, r3" /*result*/ \
625  : "=r" (_zzq_result) \
626  : "r" (_zzq_default), "r" (&_zzq_args[0]) \
627  : "cc","memory", "r3", "r4"); \
628  _zzq_result; \
629  })
630 
631 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
632  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
633  unsigned int __addr; \
634  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
635  /* R3 = guest_NRADDR */ \
636  "orr r11, r11, r11\n\t" \
637  "mov %0, r3" \
638  : "=r" (__addr) \
639  : \
640  : "cc", "memory", "r3" \
641  ); \
642  _zzq_orig->nraddr = __addr; \
643  }
644 
/* Text fragment only; combined with further asm by the wrapping macros. */
645 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
646  __SPECIAL_INSTRUCTION_PREAMBLE \
647  /* branch-and-link-to-noredir *%R4 */ \
648  "orr r12, r12, r12\n\t"
649 
650 #define VALGRIND_VEX_INJECT_IR() \
651  do { \
652  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
653  "orr r9, r9, r9\n\t" \
654  : : : "cc", "memory" \
655  ); \
656  } while (0)
657 
658 #endif /* PLAT_arm_linux */
659 
660 /* ------------------------ arm64-linux ------------------------- */
661 
662 #if defined(PLAT_arm64_linux)
663 
664 typedef
665  struct {
666  unsigned long long int nraddr; /* where's the code? */
667  }
668  OrigFn;
669 
/* Rotates of x12 totalling 3+13+51+61 = 128 bits: a net no-op on real
 hardware (128 mod 64 == 0), recognized by the JIT as the magic marker.
 Must remain byte-exact. */
670 #define __SPECIAL_INSTRUCTION_PREAMBLE \
671  "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \
672  "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"
673 
/* Default goes to x3, args-block pointer to x4; result read from x3,
 so the expression equals _zzq_default off-Valgrind. */
674 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
675  _zzq_default, _zzq_request, \
676  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
677  \
678  __extension__ \
679  ({volatile unsigned long long int _zzq_args[6]; \
680  volatile unsigned long long int _zzq_result; \
681  _zzq_args[0] = (unsigned long long int)(_zzq_request); \
682  _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
683  _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
684  _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
685  _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
686  _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
687  __asm__ volatile("mov x3, %1\n\t" /*default*/ \
688  "mov x4, %2\n\t" /*ptr*/ \
689  __SPECIAL_INSTRUCTION_PREAMBLE \
690  /* X3 = client_request ( X4 ) */ \
691  "orr x10, x10, x10\n\t" \
692  "mov %0, x3" /*result*/ \
693  : "=r" (_zzq_result) \
694  : "r" (_zzq_default), "r" (&_zzq_args[0]) \
695  : "cc","memory", "x3", "x4"); \
696  _zzq_result; \
697  })
698 
699 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
700  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
701  unsigned long long int __addr; \
702  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
703  /* X3 = guest_NRADDR */ \
704  "orr x11, x11, x11\n\t" \
705  "mov %0, x3" \
706  : "=r" (__addr) \
707  : \
708  : "cc", "memory", "x3" \
709  ); \
710  _zzq_orig->nraddr = __addr; \
711  }
712 
/* Text fragment only; combined with further asm by the wrapping macros. */
713 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
714  __SPECIAL_INSTRUCTION_PREAMBLE \
715  /* branch-and-link-to-noredir X8 */ \
716  "orr x12, x12, x12\n\t"
717 
718 #define VALGRIND_VEX_INJECT_IR() \
719  do { \
720  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
721  "orr x9, x9, x9\n\t" \
722  : : : "cc", "memory" \
723  ); \
724  } while (0)
725 
726 #endif /* PLAT_arm64_linux */
727 
728 /* ------------------------ s390x-linux ------------------------ */
729 
730 #if defined(PLAT_s390x_linux)
731 
732 typedef
733  struct {
734  unsigned long long int nraddr; /* where's the code? */
735  }
736  OrigFn;
737 
738 /* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
739  * code. This detection is implemented in platform specific toIR.c
740  * (e.g. VEX/priv/guest_s390_decoder.c).
741  */
/* Each "lr r,r" copies a register onto itself — a no-op on real
 hardware. The four-instruction sequence is the magic marker; must
 remain byte-exact. */
742 #define __SPECIAL_INSTRUCTION_PREAMBLE \
743  "lr 15,15\n\t" \
744  "lr 1,1\n\t" \
745  "lr 2,2\n\t" \
746  "lr 3,3\n\t"
747 
/* The no-op "lr" following the preamble selects which operation the
 JIT performs. */
748 #define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
749 #define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
750 #define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
751 #define __VEX_INJECT_IR_CODE "lr 5,5\n\t"
752 
/* Args-block address goes to r2, default to r3; result read back from
 r3, so the expression equals _zzq_default off-Valgrind. */
753 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
754  _zzq_default, _zzq_request, \
755  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
756  __extension__ \
757  ({volatile unsigned long long int _zzq_args[6]; \
758  volatile unsigned long long int _zzq_result; \
759  _zzq_args[0] = (unsigned long long int)(_zzq_request); \
760  _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
761  _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
762  _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
763  _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
764  _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
765  __asm__ volatile(/* r2 = args */ \
766  "lgr 2,%1\n\t" \
767  /* r3 = default */ \
768  "lgr 3,%2\n\t" \
769  __SPECIAL_INSTRUCTION_PREAMBLE \
770  __CLIENT_REQUEST_CODE \
771  /* results = r3 */ \
772  "lgr %0, 3\n\t" \
773  : "=d" (_zzq_result) \
774  : "a" (&_zzq_args[0]), "0" (_zzq_default) \
775  : "cc", "2", "3", "memory" \
776  ); \
777  _zzq_result; \
778  })
779 
780 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
781  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
782  volatile unsigned long long int __addr; \
783  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
784  __GET_NR_CONTEXT_CODE \
785  "lgr %0, 3\n\t" \
786  : "=a" (__addr) \
787  : \
788  : "cc", "3", "memory" \
789  ); \
790  _zzq_orig->nraddr = __addr; \
791  }
792 
/* Text fragment only; combined with further asm by the wrapping macros. */
793 #define VALGRIND_CALL_NOREDIR_R1 \
794  __SPECIAL_INSTRUCTION_PREAMBLE \
795  __CALL_NO_REDIR_CODE
796 
797 #define VALGRIND_VEX_INJECT_IR() \
798  do { \
799  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
800  __VEX_INJECT_IR_CODE); \
801  } while (0)
802 
803 #endif /* PLAT_s390x_linux */
804 
805 /* ------------------------- mips32-linux ---------------- */
806 
807 #if defined(PLAT_mips32_linux)
808 
809 typedef
810  struct {
811  unsigned int nraddr; /* where's the code? */
812  }
813  OrigFn;
814 
815 /* .word 0x342
816  * .word 0x742
817  * .word 0xC2
818  * .word 0x4C2*/
/* Shifts of $0 (the hardwired-zero register) are architectural no-ops;
 the four-instruction sequence is the magic marker. Must remain
 byte-exact. */
819 #define __SPECIAL_INSTRUCTION_PREAMBLE \
820  "srl $0, $0, 13\n\t" \
821  "srl $0, $0, 29\n\t" \
822  "srl $0, $0, 3\n\t" \
823  "srl $0, $0, 19\n\t"
824 
/* Default goes to $11, args-block pointer to $12; result read back
 from $11, so the expression equals _zzq_default off-Valgrind. */
825 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
826  _zzq_default, _zzq_request, \
827  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
828  __extension__ \
829  ({ volatile unsigned int _zzq_args[6]; \
830  volatile unsigned int _zzq_result; \
831  _zzq_args[0] = (unsigned int)(_zzq_request); \
832  _zzq_args[1] = (unsigned int)(_zzq_arg1); \
833  _zzq_args[2] = (unsigned int)(_zzq_arg2); \
834  _zzq_args[3] = (unsigned int)(_zzq_arg3); \
835  _zzq_args[4] = (unsigned int)(_zzq_arg4); \
836  _zzq_args[5] = (unsigned int)(_zzq_arg5); \
837  __asm__ volatile("move $11, %1\n\t" /*default*/ \
838  "move $12, %2\n\t" /*ptr*/ \
839  __SPECIAL_INSTRUCTION_PREAMBLE \
840  /* T3 = client_request ( T4 ) */ \
841  "or $13, $13, $13\n\t" \
842  "move %0, $11\n\t" /*result*/ \
843  : "=r" (_zzq_result) \
844  : "r" (_zzq_default), "r" (&_zzq_args[0]) \
845  : "$11", "$12"); \
846  _zzq_result; \
847  })
848 
849 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
850  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
851  volatile unsigned int __addr; \
852  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
853  /* %t9 = guest_NRADDR */ \
854  "or $14, $14, $14\n\t" \
855  "move %0, $11" /*result*/ \
856  : "=r" (__addr) \
857  : \
858  : "$11" \
859  ); \
860  _zzq_orig->nraddr = __addr; \
861  }
862 
/* Text fragment only; combined with further asm by the wrapping macros. */
863 #define VALGRIND_CALL_NOREDIR_T9 \
864  __SPECIAL_INSTRUCTION_PREAMBLE \
865  /* call-noredir *%t9 */ \
866  "or $15, $15, $15\n\t"
867 
868 #define VALGRIND_VEX_INJECT_IR() \
869  do { \
870  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
871  "or $11, $11, $11\n\t" \
872  ); \
873  } while (0)
874 
875 
876 #endif /* PLAT_mips32_linux */
877 
878 /* ------------------------- mips64-linux ---------------- */
879 
880 #if defined(PLAT_mips64_linux)
881 
882 typedef
883  struct {
884  unsigned long long nraddr; /* where's the code? */
885  }
886  OrigFn;
887 
888 /* dsll $0,$0, 3
889  * dsll $0,$0, 13
890  * dsll $0,$0, 29
891  * dsll $0,$0, 19*/
/* Shifts of $0 (the hardwired-zero register) are architectural no-ops;
 the four-instruction sequence is the magic marker. Must remain
 byte-exact. */
892 #define __SPECIAL_INSTRUCTION_PREAMBLE \
893  "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
894  "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
895 
/* Default goes to $11, args-block pointer to $12; result read back
 from $11, so the expression equals _zzq_default off-Valgrind. */
896 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
897  _zzq_default, _zzq_request, \
898  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
899  __extension__ \
900  ({ volatile unsigned long long int _zzq_args[6]; \
901  volatile unsigned long long int _zzq_result; \
902  _zzq_args[0] = (unsigned long long int)(_zzq_request); \
903  _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
904  _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
905  _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
906  _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
907  _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
908  __asm__ volatile("move $11, %1\n\t" /*default*/ \
909  "move $12, %2\n\t" /*ptr*/ \
910  __SPECIAL_INSTRUCTION_PREAMBLE \
911  /* $11 = client_request ( $12 ) */ \
912  "or $13, $13, $13\n\t" \
913  "move %0, $11\n\t" /*result*/ \
914  : "=r" (_zzq_result) \
915  : "r" (_zzq_default), "r" (&_zzq_args[0]) \
916  : "$11", "$12"); \
917  _zzq_result; \
918  })
919 
920 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
921  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
922  volatile unsigned long long int __addr; \
923  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
924  /* $11 = guest_NRADDR */ \
925  "or $14, $14, $14\n\t" \
926  "move %0, $11" /*result*/ \
927  : "=r" (__addr) \
928  : \
929  : "$11"); \
930  _zzq_orig->nraddr = __addr; \
931  }
932 
/* Text fragment only; combined with further asm by the wrapping macros. */
933 #define VALGRIND_CALL_NOREDIR_T9 \
934  __SPECIAL_INSTRUCTION_PREAMBLE \
935  /* call-noredir $25 */ \
936  "or $15, $15, $15\n\t"
937 
938 #define VALGRIND_VEX_INJECT_IR() \
939  do { \
940  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
941  "or $11, $11, $11\n\t" \
942  ); \
943  } while (0)
944 
945 #endif /* PLAT_mips64_linux */
946 
947 /* Insert assembly code for other platforms here... */
948 
949 #endif /* NVALGRIND */
950 
951 
952 /* ------------------------------------------------------------------ */
953 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
954 /* ugly. It's the least-worst tradeoff I can think of. */
955 /* ------------------------------------------------------------------ */
956 
957 /* This section defines magic (a.k.a appalling-hack) macros for doing
958  guaranteed-no-redirection macros, so as to get from function
959  wrappers to the functions they are wrapping. The whole point is to
960  construct standard call sequences, but to do the call itself with a
961  special no-redirect call pseudo-instruction that the JIT
962  understands and handles specially. This section is long and
963  repetitious, and I can't see a way to make it shorter.
964 
965  The naming scheme is as follows:
966 
967  CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
968 
969  'W' stands for "word" and 'v' for "void". Hence there are
970  different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
971  and for each, the possibility of returning a word-typed result, or
972  no result.
973 */
974 
975 /* Use these to write the name of your wrapper. NOTE: duplicates
976  VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
977  the default behaviour equivalence class tag "0000" into the name.
978  See pub_tool_redir.h for details -- normally you don't need to
979  think about this, though. */
980 
981 /* Use an extra level of macroisation so as to ensure the soname/fnname
982  args are fully macro-expanded before pasting them together. */
983 #define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
984 
/* Build the magic symbol name for a WRAPPER: _vgw00000ZU_<soname>_<fnname>.
   "ZU"/"ZZ" select the soname/fnname encoding scheme; "00000" is the
   default behaviour-equivalence-class tag (see pub_tool_redir.h). */
985 #define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
986  VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)
987 
988 #define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
989  VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
990 
991 /* Use this macro from within a wrapper function to collect the
992  context (address and possibly other info) of the original function.
993  Once you have that you can then use it in one of the CALL_FN_
994  macros. The type of the argument _lval is OrigFn. */
995 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
996 
997 /* Also provide end-user facilities for function replacement, rather
998  than wrapping. A replacement function differs from a wrapper in
999  that it has no way to get hold of the original function being
1000  called, and hence no way to call onwards to it. In a replacement
1001  function, VALGRIND_GET_ORIG_FN always returns zero. */
1002 
/* Same naming scheme as the wrappers, but with the _vgr (replace) prefix
   instead of _vgw (wrap). */
1003 #define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname) \
1004  VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)
1005 
1006 #define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname) \
1007  VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1008 
1009 /* Derivatives of the main macros below, for calling functions
1010  returning void. */
1011 
/* Each simply delegates to the word-returning CALL_FN_W_* variant and
   discards the result into a volatile _junk local. */
1012 #define CALL_FN_v_v(fnptr) \
1013  do { volatile unsigned long _junk; \
1014  CALL_FN_W_v(_junk,fnptr); } while (0)
1015 
1016 #define CALL_FN_v_W(fnptr, arg1) \
1017  do { volatile unsigned long _junk; \
1018  CALL_FN_W_W(_junk,fnptr,arg1); } while (0)
1019 
1020 #define CALL_FN_v_WW(fnptr, arg1,arg2) \
1021  do { volatile unsigned long _junk; \
1022  CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)
1023 
1024 #define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
1025  do { volatile unsigned long _junk; \
1026  CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
1027 
1028 #define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
1029  do { volatile unsigned long _junk; \
1030  CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
1031 
1032 #define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
1033  do { volatile unsigned long _junk; \
1034  CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
1035 
1036 #define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
1037  do { volatile unsigned long _junk; \
1038  CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
1039 
1040 #define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
1041  do { volatile unsigned long _junk; \
1042  CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1043 
1044 /* ------------------------- x86-{linux,darwin} ---------------- */
1045 
1046 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
1047 
1048 /* These regs are trashed by the hidden call. No need to mention eax
1049  as gcc can already see that, plus causes gcc to bomb. */
1050 #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
1051 
1052 /* Macros to save and align the stack before making a function
1053  call and restore it afterwards as gcc may not keep the stack
1054  pointer aligned if it doesn't realise calls are being made
1055  to other functions. */
1056 
/* Saves %esp in %edi (hence "edi" in every clobber list below) and rounds
   %esp down to a 16-byte boundary; RESTORE undoes it. Both expand to bare
   asm-text fragments pasted into the CALL_FN_ asm bodies. */
1057 #define VALGRIND_ALIGN_STACK \
1058  "movl %%esp,%%edi\n\t" \
1059  "andl $0xfffffff0,%%esp\n\t"
1060 #define VALGRIND_RESTORE_STACK \
1061  "movl %%edi,%%esp\n\t"
1062 
1063 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1064  long) == 4. */
1065 
/* Common pattern for all x86 CALL_FN_W_* macros: _argvec[0] holds the
   target address, _argvec[1..n] the arguments; %eax is pointed at the
   vector, arguments are pushed right-to-left from it, then the target is
   loaded into %eax and called via the no-redirect pseudo-call
   (VALGRIND_CALL_NOREDIR_EAX). The result comes back in %eax ("=a").
   The "subl $N,%%esp" padding keeps %esp 16-byte aligned once all the
   4-byte pushes are done. */
1066 #define CALL_FN_W_v(lval, orig) \
1067  do { \
1068  volatile OrigFn _orig = (orig); \
1069  volatile unsigned long _argvec[1]; \
1070  volatile unsigned long _res; \
1071  _argvec[0] = (unsigned long)_orig.nraddr; \
1072  __asm__ volatile( \
1073  VALGRIND_ALIGN_STACK \
1074  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1075  VALGRIND_CALL_NOREDIR_EAX \
1076  VALGRIND_RESTORE_STACK \
1077  : /*out*/ "=a" (_res) \
1078  : /*in*/ "a" (&_argvec[0]) \
1079  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1080  ); \
1081  lval = (__typeof__(lval)) _res; \
1082  } while (0)
1083 
1084 #define CALL_FN_W_W(lval, orig, arg1) \
1085  do { \
1086  volatile OrigFn _orig = (orig); \
1087  volatile unsigned long _argvec[2]; \
1088  volatile unsigned long _res; \
1089  _argvec[0] = (unsigned long)_orig.nraddr; \
1090  _argvec[1] = (unsigned long)(arg1); \
1091  __asm__ volatile( \
1092  VALGRIND_ALIGN_STACK \
1093  "subl $12, %%esp\n\t" \
1094  "pushl 4(%%eax)\n\t" \
1095  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1096  VALGRIND_CALL_NOREDIR_EAX \
1097  VALGRIND_RESTORE_STACK \
1098  : /*out*/ "=a" (_res) \
1099  : /*in*/ "a" (&_argvec[0]) \
1100  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1101  ); \
1102  lval = (__typeof__(lval)) _res; \
1103  } while (0)
1104 
1105 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1106  do { \
1107  volatile OrigFn _orig = (orig); \
1108  volatile unsigned long _argvec[3]; \
1109  volatile unsigned long _res; \
1110  _argvec[0] = (unsigned long)_orig.nraddr; \
1111  _argvec[1] = (unsigned long)(arg1); \
1112  _argvec[2] = (unsigned long)(arg2); \
1113  __asm__ volatile( \
1114  VALGRIND_ALIGN_STACK \
1115  "subl $8, %%esp\n\t" \
1116  "pushl 8(%%eax)\n\t" \
1117  "pushl 4(%%eax)\n\t" \
1118  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1119  VALGRIND_CALL_NOREDIR_EAX \
1120  VALGRIND_RESTORE_STACK \
1121  : /*out*/ "=a" (_res) \
1122  : /*in*/ "a" (&_argvec[0]) \
1123  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1124  ); \
1125  lval = (__typeof__(lval)) _res; \
1126  } while (0)
1127 
1128 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1129  do { \
1130  volatile OrigFn _orig = (orig); \
1131  volatile unsigned long _argvec[4]; \
1132  volatile unsigned long _res; \
1133  _argvec[0] = (unsigned long)_orig.nraddr; \
1134  _argvec[1] = (unsigned long)(arg1); \
1135  _argvec[2] = (unsigned long)(arg2); \
1136  _argvec[3] = (unsigned long)(arg3); \
1137  __asm__ volatile( \
1138  VALGRIND_ALIGN_STACK \
1139  "subl $4, %%esp\n\t" \
1140  "pushl 12(%%eax)\n\t" \
1141  "pushl 8(%%eax)\n\t" \
1142  "pushl 4(%%eax)\n\t" \
1143  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1144  VALGRIND_CALL_NOREDIR_EAX \
1145  VALGRIND_RESTORE_STACK \
1146  : /*out*/ "=a" (_res) \
1147  : /*in*/ "a" (&_argvec[0]) \
1148  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1149  ); \
1150  lval = (__typeof__(lval)) _res; \
1151  } while (0)
1152 
1153 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1154  do { \
1155  volatile OrigFn _orig = (orig); \
1156  volatile unsigned long _argvec[5]; \
1157  volatile unsigned long _res; \
1158  _argvec[0] = (unsigned long)_orig.nraddr; \
1159  _argvec[1] = (unsigned long)(arg1); \
1160  _argvec[2] = (unsigned long)(arg2); \
1161  _argvec[3] = (unsigned long)(arg3); \
1162  _argvec[4] = (unsigned long)(arg4); \
1163  __asm__ volatile( \
1164  VALGRIND_ALIGN_STACK \
1165  "pushl 16(%%eax)\n\t" \
1166  "pushl 12(%%eax)\n\t" \
1167  "pushl 8(%%eax)\n\t" \
1168  "pushl 4(%%eax)\n\t" \
1169  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1170  VALGRIND_CALL_NOREDIR_EAX \
1171  VALGRIND_RESTORE_STACK \
1172  : /*out*/ "=a" (_res) \
1173  : /*in*/ "a" (&_argvec[0]) \
1174  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1175  ); \
1176  lval = (__typeof__(lval)) _res; \
1177  } while (0)
1178 
/* 5..7-argument variants; same protocol as CALL_FN_W_v above. The padding
   cycle repeats: subl $12 (5 args), $8 (6), $4 (7) so that pushes end on a
   16-byte boundary. */
1179 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1180  do { \
1181  volatile OrigFn _orig = (orig); \
1182  volatile unsigned long _argvec[6]; \
1183  volatile unsigned long _res; \
1184  _argvec[0] = (unsigned long)_orig.nraddr; \
1185  _argvec[1] = (unsigned long)(arg1); \
1186  _argvec[2] = (unsigned long)(arg2); \
1187  _argvec[3] = (unsigned long)(arg3); \
1188  _argvec[4] = (unsigned long)(arg4); \
1189  _argvec[5] = (unsigned long)(arg5); \
1190  __asm__ volatile( \
1191  VALGRIND_ALIGN_STACK \
1192  "subl $12, %%esp\n\t" \
1193  "pushl 20(%%eax)\n\t" \
1194  "pushl 16(%%eax)\n\t" \
1195  "pushl 12(%%eax)\n\t" \
1196  "pushl 8(%%eax)\n\t" \
1197  "pushl 4(%%eax)\n\t" \
1198  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1199  VALGRIND_CALL_NOREDIR_EAX \
1200  VALGRIND_RESTORE_STACK \
1201  : /*out*/ "=a" (_res) \
1202  : /*in*/ "a" (&_argvec[0]) \
1203  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1204  ); \
1205  lval = (__typeof__(lval)) _res; \
1206  } while (0)
1207 
1208 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1209  do { \
1210  volatile OrigFn _orig = (orig); \
1211  volatile unsigned long _argvec[7]; \
1212  volatile unsigned long _res; \
1213  _argvec[0] = (unsigned long)_orig.nraddr; \
1214  _argvec[1] = (unsigned long)(arg1); \
1215  _argvec[2] = (unsigned long)(arg2); \
1216  _argvec[3] = (unsigned long)(arg3); \
1217  _argvec[4] = (unsigned long)(arg4); \
1218  _argvec[5] = (unsigned long)(arg5); \
1219  _argvec[6] = (unsigned long)(arg6); \
1220  __asm__ volatile( \
1221  VALGRIND_ALIGN_STACK \
1222  "subl $8, %%esp\n\t" \
1223  "pushl 24(%%eax)\n\t" \
1224  "pushl 20(%%eax)\n\t" \
1225  "pushl 16(%%eax)\n\t" \
1226  "pushl 12(%%eax)\n\t" \
1227  "pushl 8(%%eax)\n\t" \
1228  "pushl 4(%%eax)\n\t" \
1229  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1230  VALGRIND_CALL_NOREDIR_EAX \
1231  VALGRIND_RESTORE_STACK \
1232  : /*out*/ "=a" (_res) \
1233  : /*in*/ "a" (&_argvec[0]) \
1234  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1235  ); \
1236  lval = (__typeof__(lval)) _res; \
1237  } while (0)
1238 
1239 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1240  arg7) \
1241  do { \
1242  volatile OrigFn _orig = (orig); \
1243  volatile unsigned long _argvec[8]; \
1244  volatile unsigned long _res; \
1245  _argvec[0] = (unsigned long)_orig.nraddr; \
1246  _argvec[1] = (unsigned long)(arg1); \
1247  _argvec[2] = (unsigned long)(arg2); \
1248  _argvec[3] = (unsigned long)(arg3); \
1249  _argvec[4] = (unsigned long)(arg4); \
1250  _argvec[5] = (unsigned long)(arg5); \
1251  _argvec[6] = (unsigned long)(arg6); \
1252  _argvec[7] = (unsigned long)(arg7); \
1253  __asm__ volatile( \
1254  VALGRIND_ALIGN_STACK \
1255  "subl $4, %%esp\n\t" \
1256  "pushl 28(%%eax)\n\t" \
1257  "pushl 24(%%eax)\n\t" \
1258  "pushl 20(%%eax)\n\t" \
1259  "pushl 16(%%eax)\n\t" \
1260  "pushl 12(%%eax)\n\t" \
1261  "pushl 8(%%eax)\n\t" \
1262  "pushl 4(%%eax)\n\t" \
1263  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1264  VALGRIND_CALL_NOREDIR_EAX \
1265  VALGRIND_RESTORE_STACK \
1266  : /*out*/ "=a" (_res) \
1267  : /*in*/ "a" (&_argvec[0]) \
1268  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1269  ); \
1270  lval = (__typeof__(lval)) _res; \
1271  } while (0)
1272 
/* 8..10-argument variants; same protocol as CALL_FN_W_v above. At 8 args
   no padding is needed (8*4 = 32 bytes, already 16-aligned); the subl
   padding cycle then restarts at $12 for 9 args and $8 for 10. */
1273 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1274  arg7,arg8) \
1275  do { \
1276  volatile OrigFn _orig = (orig); \
1277  volatile unsigned long _argvec[9]; \
1278  volatile unsigned long _res; \
1279  _argvec[0] = (unsigned long)_orig.nraddr; \
1280  _argvec[1] = (unsigned long)(arg1); \
1281  _argvec[2] = (unsigned long)(arg2); \
1282  _argvec[3] = (unsigned long)(arg3); \
1283  _argvec[4] = (unsigned long)(arg4); \
1284  _argvec[5] = (unsigned long)(arg5); \
1285  _argvec[6] = (unsigned long)(arg6); \
1286  _argvec[7] = (unsigned long)(arg7); \
1287  _argvec[8] = (unsigned long)(arg8); \
1288  __asm__ volatile( \
1289  VALGRIND_ALIGN_STACK \
1290  "pushl 32(%%eax)\n\t" \
1291  "pushl 28(%%eax)\n\t" \
1292  "pushl 24(%%eax)\n\t" \
1293  "pushl 20(%%eax)\n\t" \
1294  "pushl 16(%%eax)\n\t" \
1295  "pushl 12(%%eax)\n\t" \
1296  "pushl 8(%%eax)\n\t" \
1297  "pushl 4(%%eax)\n\t" \
1298  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1299  VALGRIND_CALL_NOREDIR_EAX \
1300  VALGRIND_RESTORE_STACK \
1301  : /*out*/ "=a" (_res) \
1302  : /*in*/ "a" (&_argvec[0]) \
1303  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1304  ); \
1305  lval = (__typeof__(lval)) _res; \
1306  } while (0)
1307 
1308 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1309  arg7,arg8,arg9) \
1310  do { \
1311  volatile OrigFn _orig = (orig); \
1312  volatile unsigned long _argvec[10]; \
1313  volatile unsigned long _res; \
1314  _argvec[0] = (unsigned long)_orig.nraddr; \
1315  _argvec[1] = (unsigned long)(arg1); \
1316  _argvec[2] = (unsigned long)(arg2); \
1317  _argvec[3] = (unsigned long)(arg3); \
1318  _argvec[4] = (unsigned long)(arg4); \
1319  _argvec[5] = (unsigned long)(arg5); \
1320  _argvec[6] = (unsigned long)(arg6); \
1321  _argvec[7] = (unsigned long)(arg7); \
1322  _argvec[8] = (unsigned long)(arg8); \
1323  _argvec[9] = (unsigned long)(arg9); \
1324  __asm__ volatile( \
1325  VALGRIND_ALIGN_STACK \
1326  "subl $12, %%esp\n\t" \
1327  "pushl 36(%%eax)\n\t" \
1328  "pushl 32(%%eax)\n\t" \
1329  "pushl 28(%%eax)\n\t" \
1330  "pushl 24(%%eax)\n\t" \
1331  "pushl 20(%%eax)\n\t" \
1332  "pushl 16(%%eax)\n\t" \
1333  "pushl 12(%%eax)\n\t" \
1334  "pushl 8(%%eax)\n\t" \
1335  "pushl 4(%%eax)\n\t" \
1336  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1337  VALGRIND_CALL_NOREDIR_EAX \
1338  VALGRIND_RESTORE_STACK \
1339  : /*out*/ "=a" (_res) \
1340  : /*in*/ "a" (&_argvec[0]) \
1341  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1342  ); \
1343  lval = (__typeof__(lval)) _res; \
1344  } while (0)
1345 
1346 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1347  arg7,arg8,arg9,arg10) \
1348  do { \
1349  volatile OrigFn _orig = (orig); \
1350  volatile unsigned long _argvec[11]; \
1351  volatile unsigned long _res; \
1352  _argvec[0] = (unsigned long)_orig.nraddr; \
1353  _argvec[1] = (unsigned long)(arg1); \
1354  _argvec[2] = (unsigned long)(arg2); \
1355  _argvec[3] = (unsigned long)(arg3); \
1356  _argvec[4] = (unsigned long)(arg4); \
1357  _argvec[5] = (unsigned long)(arg5); \
1358  _argvec[6] = (unsigned long)(arg6); \
1359  _argvec[7] = (unsigned long)(arg7); \
1360  _argvec[8] = (unsigned long)(arg8); \
1361  _argvec[9] = (unsigned long)(arg9); \
1362  _argvec[10] = (unsigned long)(arg10); \
1363  __asm__ volatile( \
1364  VALGRIND_ALIGN_STACK \
1365  "subl $8, %%esp\n\t" \
1366  "pushl 40(%%eax)\n\t" \
1367  "pushl 36(%%eax)\n\t" \
1368  "pushl 32(%%eax)\n\t" \
1369  "pushl 28(%%eax)\n\t" \
1370  "pushl 24(%%eax)\n\t" \
1371  "pushl 20(%%eax)\n\t" \
1372  "pushl 16(%%eax)\n\t" \
1373  "pushl 12(%%eax)\n\t" \
1374  "pushl 8(%%eax)\n\t" \
1375  "pushl 4(%%eax)\n\t" \
1376  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1377  VALGRIND_CALL_NOREDIR_EAX \
1378  VALGRIND_RESTORE_STACK \
1379  : /*out*/ "=a" (_res) \
1380  : /*in*/ "a" (&_argvec[0]) \
1381  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1382  ); \
1383  lval = (__typeof__(lval)) _res; \
1384  } while (0)
1385 
/* 11- and 12-argument variants; same protocol as CALL_FN_W_v above
   (subl $4 pad for 11 args, none for 12 since 12*4 = 48 is 16-aligned). */
1386 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1387  arg6,arg7,arg8,arg9,arg10, \
1388  arg11) \
1389  do { \
1390  volatile OrigFn _orig = (orig); \
1391  volatile unsigned long _argvec[12]; \
1392  volatile unsigned long _res; \
1393  _argvec[0] = (unsigned long)_orig.nraddr; \
1394  _argvec[1] = (unsigned long)(arg1); \
1395  _argvec[2] = (unsigned long)(arg2); \
1396  _argvec[3] = (unsigned long)(arg3); \
1397  _argvec[4] = (unsigned long)(arg4); \
1398  _argvec[5] = (unsigned long)(arg5); \
1399  _argvec[6] = (unsigned long)(arg6); \
1400  _argvec[7] = (unsigned long)(arg7); \
1401  _argvec[8] = (unsigned long)(arg8); \
1402  _argvec[9] = (unsigned long)(arg9); \
1403  _argvec[10] = (unsigned long)(arg10); \
1404  _argvec[11] = (unsigned long)(arg11); \
1405  __asm__ volatile( \
1406  VALGRIND_ALIGN_STACK \
1407  "subl $4, %%esp\n\t" \
1408  "pushl 44(%%eax)\n\t" \
1409  "pushl 40(%%eax)\n\t" \
1410  "pushl 36(%%eax)\n\t" \
1411  "pushl 32(%%eax)\n\t" \
1412  "pushl 28(%%eax)\n\t" \
1413  "pushl 24(%%eax)\n\t" \
1414  "pushl 20(%%eax)\n\t" \
1415  "pushl 16(%%eax)\n\t" \
1416  "pushl 12(%%eax)\n\t" \
1417  "pushl 8(%%eax)\n\t" \
1418  "pushl 4(%%eax)\n\t" \
1419  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1420  VALGRIND_CALL_NOREDIR_EAX \
1421  VALGRIND_RESTORE_STACK \
1422  : /*out*/ "=a" (_res) \
1423  : /*in*/ "a" (&_argvec[0]) \
1424  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1425  ); \
1426  lval = (__typeof__(lval)) _res; \
1427  } while (0)
1428 
1429 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1430  arg6,arg7,arg8,arg9,arg10, \
1431  arg11,arg12) \
1432  do { \
1433  volatile OrigFn _orig = (orig); \
1434  volatile unsigned long _argvec[13]; \
1435  volatile unsigned long _res; \
1436  _argvec[0] = (unsigned long)_orig.nraddr; \
1437  _argvec[1] = (unsigned long)(arg1); \
1438  _argvec[2] = (unsigned long)(arg2); \
1439  _argvec[3] = (unsigned long)(arg3); \
1440  _argvec[4] = (unsigned long)(arg4); \
1441  _argvec[5] = (unsigned long)(arg5); \
1442  _argvec[6] = (unsigned long)(arg6); \
1443  _argvec[7] = (unsigned long)(arg7); \
1444  _argvec[8] = (unsigned long)(arg8); \
1445  _argvec[9] = (unsigned long)(arg9); \
1446  _argvec[10] = (unsigned long)(arg10); \
1447  _argvec[11] = (unsigned long)(arg11); \
1448  _argvec[12] = (unsigned long)(arg12); \
1449  __asm__ volatile( \
1450  VALGRIND_ALIGN_STACK \
1451  "pushl 48(%%eax)\n\t" \
1452  "pushl 44(%%eax)\n\t" \
1453  "pushl 40(%%eax)\n\t" \
1454  "pushl 36(%%eax)\n\t" \
1455  "pushl 32(%%eax)\n\t" \
1456  "pushl 28(%%eax)\n\t" \
1457  "pushl 24(%%eax)\n\t" \
1458  "pushl 20(%%eax)\n\t" \
1459  "pushl 16(%%eax)\n\t" \
1460  "pushl 12(%%eax)\n\t" \
1461  "pushl 8(%%eax)\n\t" \
1462  "pushl 4(%%eax)\n\t" \
1463  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1464  VALGRIND_CALL_NOREDIR_EAX \
1465  VALGRIND_RESTORE_STACK \
1466  : /*out*/ "=a" (_res) \
1467  : /*in*/ "a" (&_argvec[0]) \
1468  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1469  ); \
1470  lval = (__typeof__(lval)) _res; \
1471  } while (0)
1472 
1473 #endif /* PLAT_x86_linux || PLAT_x86_darwin */
1474 
1475 /* ------------------------ amd64-{linux,darwin} --------------- */
1476 
1477 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
1478 
1479 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1480 
1481 /* These regs are trashed by the hidden call. */
1482 #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1483  "rdi", "r8", "r9", "r10", "r11"
1484 
1485 /* This is all pretty complex. It's so as to make stack unwinding
1486  work reliably. See bug 243270. The basic problem is the sub and
1487  add of 128 of %rsp in all of the following macros. If gcc believes
1488  the CFA is in %rsp, then unwinding may fail, because what's at the
1489  CFA is not what gcc "expected" when it constructs the CFIs for the
1490  places where the macros are instantiated.
1491 
1492  But we can't just add a CFI annotation to increase the CFA offset
1493  by 128, to match the sub of 128 from %rsp, because we don't know
1494  whether gcc has chosen %rsp as the CFA at that point, or whether it
1495  has chosen some other register (eg, %rbp). In the latter case,
1496  adding a CFI annotation to change the CFA offset is simply wrong.
1497 
1498  So the solution is to get hold of the CFA using
1499  __builtin_dwarf_cfa(), put it in a known register, and add a
1500  CFI annotation to say what the register is. We choose %rbp for
1501  this (perhaps perversely), because:
1502 
1503  (1) %rbp is already subject to unwinding. If a new register was
1504  chosen then the unwinder would have to unwind it in all stack
1505  traces, which is expensive, and
1506 
1507  (2) %rbp is already subject to precise exception updates in the
1508  JIT. If a new register was chosen, we'd have to have precise
1509  exceptions for it too, which reduces performance of the
1510  generated code.
1511 
1512  However .. one extra complication. We can't just whack the result
1513  of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1514  list of trashed registers at the end of the inline assembly
1515  fragments; gcc won't allow %rbp to appear in that list. Hence
1516  instead we need to stash %rbp in %r15 for the duration of the asm,
1517  and say that %r15 is trashed instead. gcc seems happy to go with
1518  that.
1519 
1520  Oh .. and this all needs to be conditionalised so that it is
1521  unchanged from before this commit, when compiled with older gccs
1522  that don't support __builtin_dwarf_cfa. Furthermore, since
1523  this header file is freestanding, it has to be independent of
1524  config.h, and so the following conditionalisation cannot depend on
1525  configure time checks.
1526 
1527  Although it's not clear from
1528  'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1529  this expression excludes Darwin.
1530  .cfi directives in Darwin assembly appear to be completely
1531  different and I haven't investigated how they work.
1532 
1533  For even more entertainment value, note we have to use the
1534  completely undocumented __builtin_dwarf_cfa(), which appears to
1535  really compute the CFA, whereas __builtin_frame_address(0) claims
1536  to but actually doesn't. See
1537  https://bugs.kde.org/show_bug.cgi?id=243270#c47
1538 */
/* __FRAME_POINTER supplies the CFA as input operand %2 to each asm body;
   the PROLOGUE/EPILOGUE pair stashes %rbp in %r15 (hence "r15" in the
   clobber lists), installs the CFA in %rbp, and annotates that fact for
   the unwinder via .cfi directives. All three expand to nothing on
   compilers without DWARF2 CFI asm support. */
1539 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
1540 # define __FRAME_POINTER \
1541  ,"r"(__builtin_dwarf_cfa())
1542 # define VALGRIND_CFI_PROLOGUE \
1543  "movq %%rbp, %%r15\n\t" \
1544  "movq %2, %%rbp\n\t" \
1545  ".cfi_remember_state\n\t" \
1546  ".cfi_def_cfa rbp, 0\n\t"
1547 # define VALGRIND_CFI_EPILOGUE \
1548  "movq %%r15, %%rbp\n\t" \
1549  ".cfi_restore_state\n\t"
1550 #else
1551 # define __FRAME_POINTER
1552 # define VALGRIND_CFI_PROLOGUE
1553 # define VALGRIND_CFI_EPILOGUE
1554 #endif
1555 
1556 /* Macros to save and align the stack before making a function
1557  call and restore it afterwards as gcc may not keep the stack
1558  pointer aligned if it doesn't realise calls are being made
1559  to other functions. */
1560 
/* Saves %rsp in %r14 (hence "r14" in every clobber list below) and rounds
   %rsp down to a 16-byte boundary; RESTORE undoes it. */
1561 #define VALGRIND_ALIGN_STACK \
1562  "movq %%rsp,%%r14\n\t" \
1563  "andq $0xfffffffffffffff0,%%rsp\n\t"
1564 #define VALGRIND_RESTORE_STACK \
1565  "movq %%r14,%%rsp\n\t"
1566 
1567 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1568  long) == 8. */
1569 
1570 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1571  macros. In order not to trash the stack redzone, we need to drop
1572  %rsp by 128 before the hidden call, and restore afterwards. The
1573  nastiness is that it is only by luck that the stack still appears
1574  to be unwindable during the hidden call - since then the behaviour
1575  of any routine using this macro does not match what the CFI data
1576  says. Sigh.
1577 
1578  Why is this important? Imagine that a wrapper has a stack
1579  allocated local, and passes to the hidden call, a pointer to it.
1580  Because gcc does not know about the hidden call, it may allocate
1581  that local in the redzone. Unfortunately the hidden call may then
1582  trash it before it comes to use it. So we must step clear of the
1583  redzone, for the duration of the hidden call, to make it safe.
1584 
1585  Probably the same problem afflicts the other redzone-style ABIs too
1586  (ppc64-linux); but for those, the stack is
1587  self describing (none of this CFI nonsense) so at least messing
1588  with the stack pointer doesn't give a danger of non-unwindable
1589  stack. */
1590 
/* Common pattern for all amd64 CALL_FN_W_* macros: _argvec[0] holds the
   target address, _argvec[1..n] the arguments; %rax points at the vector,
   the first six args are loaded into rdi/rsi/rdx/rcx/r8/r9 per the ABI,
   then the target is loaded into %rax and called via the no-redirect
   pseudo-call (VALGRIND_CALL_NOREDIR_RAX). "subq $128,%%rsp" steps clear
   of the redzone (see kludge note above); the result returns in %rax. */
1591 #define CALL_FN_W_v(lval, orig) \
1592  do { \
1593  volatile OrigFn _orig = (orig); \
1594  volatile unsigned long _argvec[1]; \
1595  volatile unsigned long _res; \
1596  _argvec[0] = (unsigned long)_orig.nraddr; \
1597  __asm__ volatile( \
1598  VALGRIND_CFI_PROLOGUE \
1599  VALGRIND_ALIGN_STACK \
1600  "subq $128,%%rsp\n\t" \
1601  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1602  VALGRIND_CALL_NOREDIR_RAX \
1603  VALGRIND_RESTORE_STACK \
1604  VALGRIND_CFI_EPILOGUE \
1605  : /*out*/ "=a" (_res) \
1606  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1607  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1608  ); \
1609  lval = (__typeof__(lval)) _res; \
1610  } while (0)
1611 
1612 #define CALL_FN_W_W(lval, orig, arg1) \
1613  do { \
1614  volatile OrigFn _orig = (orig); \
1615  volatile unsigned long _argvec[2]; \
1616  volatile unsigned long _res; \
1617  _argvec[0] = (unsigned long)_orig.nraddr; \
1618  _argvec[1] = (unsigned long)(arg1); \
1619  __asm__ volatile( \
1620  VALGRIND_CFI_PROLOGUE \
1621  VALGRIND_ALIGN_STACK \
1622  "subq $128,%%rsp\n\t" \
1623  "movq 8(%%rax), %%rdi\n\t" \
1624  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1625  VALGRIND_CALL_NOREDIR_RAX \
1626  VALGRIND_RESTORE_STACK \
1627  VALGRIND_CFI_EPILOGUE \
1628  : /*out*/ "=a" (_res) \
1629  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1630  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1631  ); \
1632  lval = (__typeof__(lval)) _res; \
1633  } while (0)
1634 
1635 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1636  do { \
1637  volatile OrigFn _orig = (orig); \
1638  volatile unsigned long _argvec[3]; \
1639  volatile unsigned long _res; \
1640  _argvec[0] = (unsigned long)_orig.nraddr; \
1641  _argvec[1] = (unsigned long)(arg1); \
1642  _argvec[2] = (unsigned long)(arg2); \
1643  __asm__ volatile( \
1644  VALGRIND_CFI_PROLOGUE \
1645  VALGRIND_ALIGN_STACK \
1646  "subq $128,%%rsp\n\t" \
1647  "movq 16(%%rax), %%rsi\n\t" \
1648  "movq 8(%%rax), %%rdi\n\t" \
1649  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1650  VALGRIND_CALL_NOREDIR_RAX \
1651  VALGRIND_RESTORE_STACK \
1652  VALGRIND_CFI_EPILOGUE \
1653  : /*out*/ "=a" (_res) \
1654  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1655  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1656  ); \
1657  lval = (__typeof__(lval)) _res; \
1658  } while (0)
1659 
1660 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1661  do { \
1662  volatile OrigFn _orig = (orig); \
1663  volatile unsigned long _argvec[4]; \
1664  volatile unsigned long _res; \
1665  _argvec[0] = (unsigned long)_orig.nraddr; \
1666  _argvec[1] = (unsigned long)(arg1); \
1667  _argvec[2] = (unsigned long)(arg2); \
1668  _argvec[3] = (unsigned long)(arg3); \
1669  __asm__ volatile( \
1670  VALGRIND_CFI_PROLOGUE \
1671  VALGRIND_ALIGN_STACK \
1672  "subq $128,%%rsp\n\t" \
1673  "movq 24(%%rax), %%rdx\n\t" \
1674  "movq 16(%%rax), %%rsi\n\t" \
1675  "movq 8(%%rax), %%rdi\n\t" \
1676  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1677  VALGRIND_CALL_NOREDIR_RAX \
1678  VALGRIND_RESTORE_STACK \
1679  VALGRIND_CFI_EPILOGUE \
1680  : /*out*/ "=a" (_res) \
1681  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1682  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1683  ); \
1684  lval = (__typeof__(lval)) _res; \
1685  } while (0)
1686 
1687 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1688  do { \
1689  volatile OrigFn _orig = (orig); \
1690  volatile unsigned long _argvec[5]; \
1691  volatile unsigned long _res; \
1692  _argvec[0] = (unsigned long)_orig.nraddr; \
1693  _argvec[1] = (unsigned long)(arg1); \
1694  _argvec[2] = (unsigned long)(arg2); \
1695  _argvec[3] = (unsigned long)(arg3); \
1696  _argvec[4] = (unsigned long)(arg4); \
1697  __asm__ volatile( \
1698  VALGRIND_CFI_PROLOGUE \
1699  VALGRIND_ALIGN_STACK \
1700  "subq $128,%%rsp\n\t" \
1701  "movq 32(%%rax), %%rcx\n\t" \
1702  "movq 24(%%rax), %%rdx\n\t" \
1703  "movq 16(%%rax), %%rsi\n\t" \
1704  "movq 8(%%rax), %%rdi\n\t" \
1705  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1706  VALGRIND_CALL_NOREDIR_RAX \
1707  VALGRIND_RESTORE_STACK \
1708  VALGRIND_CFI_EPILOGUE \
1709  : /*out*/ "=a" (_res) \
1710  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1711  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1712  ); \
1713  lval = (__typeof__(lval)) _res; \
1714  } while (0)
1715 
/* 5..7-argument variants; same protocol as the amd64 CALL_FN_W_v above.
   From 7 args on, the extra arguments go on the stack (pushq); W_7W uses
   "subq $136" (128 redzone skip + 8 pad) so the single 8-byte push lands
   on a 16-byte boundary. */
1716 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1717  do { \
1718  volatile OrigFn _orig = (orig); \
1719  volatile unsigned long _argvec[6]; \
1720  volatile unsigned long _res; \
1721  _argvec[0] = (unsigned long)_orig.nraddr; \
1722  _argvec[1] = (unsigned long)(arg1); \
1723  _argvec[2] = (unsigned long)(arg2); \
1724  _argvec[3] = (unsigned long)(arg3); \
1725  _argvec[4] = (unsigned long)(arg4); \
1726  _argvec[5] = (unsigned long)(arg5); \
1727  __asm__ volatile( \
1728  VALGRIND_CFI_PROLOGUE \
1729  VALGRIND_ALIGN_STACK \
1730  "subq $128,%%rsp\n\t" \
1731  "movq 40(%%rax), %%r8\n\t" \
1732  "movq 32(%%rax), %%rcx\n\t" \
1733  "movq 24(%%rax), %%rdx\n\t" \
1734  "movq 16(%%rax), %%rsi\n\t" \
1735  "movq 8(%%rax), %%rdi\n\t" \
1736  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1737  VALGRIND_CALL_NOREDIR_RAX \
1738  VALGRIND_RESTORE_STACK \
1739  VALGRIND_CFI_EPILOGUE \
1740  : /*out*/ "=a" (_res) \
1741  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1742  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1743  ); \
1744  lval = (__typeof__(lval)) _res; \
1745  } while (0)
1746 
1747 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1748  do { \
1749  volatile OrigFn _orig = (orig); \
1750  volatile unsigned long _argvec[7]; \
1751  volatile unsigned long _res; \
1752  _argvec[0] = (unsigned long)_orig.nraddr; \
1753  _argvec[1] = (unsigned long)(arg1); \
1754  _argvec[2] = (unsigned long)(arg2); \
1755  _argvec[3] = (unsigned long)(arg3); \
1756  _argvec[4] = (unsigned long)(arg4); \
1757  _argvec[5] = (unsigned long)(arg5); \
1758  _argvec[6] = (unsigned long)(arg6); \
1759  __asm__ volatile( \
1760  VALGRIND_CFI_PROLOGUE \
1761  VALGRIND_ALIGN_STACK \
1762  "subq $128,%%rsp\n\t" \
1763  "movq 48(%%rax), %%r9\n\t" \
1764  "movq 40(%%rax), %%r8\n\t" \
1765  "movq 32(%%rax), %%rcx\n\t" \
1766  "movq 24(%%rax), %%rdx\n\t" \
1767  "movq 16(%%rax), %%rsi\n\t" \
1768  "movq 8(%%rax), %%rdi\n\t" \
1769  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1770  VALGRIND_CALL_NOREDIR_RAX \
1771  VALGRIND_RESTORE_STACK \
1772  VALGRIND_CFI_EPILOGUE \
1773  : /*out*/ "=a" (_res) \
1774  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1775  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1776  ); \
1777  lval = (__typeof__(lval)) _res; \
1778  } while (0)
1779 
1780 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1781  arg7) \
1782  do { \
1783  volatile OrigFn _orig = (orig); \
1784  volatile unsigned long _argvec[8]; \
1785  volatile unsigned long _res; \
1786  _argvec[0] = (unsigned long)_orig.nraddr; \
1787  _argvec[1] = (unsigned long)(arg1); \
1788  _argvec[2] = (unsigned long)(arg2); \
1789  _argvec[3] = (unsigned long)(arg3); \
1790  _argvec[4] = (unsigned long)(arg4); \
1791  _argvec[5] = (unsigned long)(arg5); \
1792  _argvec[6] = (unsigned long)(arg6); \
1793  _argvec[7] = (unsigned long)(arg7); \
1794  __asm__ volatile( \
1795  VALGRIND_CFI_PROLOGUE \
1796  VALGRIND_ALIGN_STACK \
1797  "subq $136,%%rsp\n\t" \
1798  "pushq 56(%%rax)\n\t" \
1799  "movq 48(%%rax), %%r9\n\t" \
1800  "movq 40(%%rax), %%r8\n\t" \
1801  "movq 32(%%rax), %%rcx\n\t" \
1802  "movq 24(%%rax), %%rdx\n\t" \
1803  "movq 16(%%rax), %%rsi\n\t" \
1804  "movq 8(%%rax), %%rdi\n\t" \
1805  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1806  VALGRIND_CALL_NOREDIR_RAX \
1807  VALGRIND_RESTORE_STACK \
1808  VALGRIND_CFI_EPILOGUE \
1809  : /*out*/ "=a" (_res) \
1810  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1811  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1812  ); \
1813  lval = (__typeof__(lval)) _res; \
1814  } while (0)
1815 
/* Call an 8-arg word function (amd64): args 1-6 in registers, args 7-8
   pushed in reverse order (arg8 first) so arg7 ends up lowest on the
   stack.  Even push count, so the plain $128 drop preserves alignment.
   NOTE(review): asm left byte-identical; comment added only. */
1816 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1817  arg7,arg8) \
1818  do { \
1819  volatile OrigFn _orig = (orig); \
1820  volatile unsigned long _argvec[9]; \
1821  volatile unsigned long _res; \
1822  _argvec[0] = (unsigned long)_orig.nraddr; \
1823  _argvec[1] = (unsigned long)(arg1); \
1824  _argvec[2] = (unsigned long)(arg2); \
1825  _argvec[3] = (unsigned long)(arg3); \
1826  _argvec[4] = (unsigned long)(arg4); \
1827  _argvec[5] = (unsigned long)(arg5); \
1828  _argvec[6] = (unsigned long)(arg6); \
1829  _argvec[7] = (unsigned long)(arg7); \
1830  _argvec[8] = (unsigned long)(arg8); \
1831  __asm__ volatile( \
1832  VALGRIND_CFI_PROLOGUE \
1833  VALGRIND_ALIGN_STACK \
1834  "subq $128,%%rsp\n\t" \
1835  "pushq 64(%%rax)\n\t" \
1836  "pushq 56(%%rax)\n\t" \
1837  "movq 48(%%rax), %%r9\n\t" \
1838  "movq 40(%%rax), %%r8\n\t" \
1839  "movq 32(%%rax), %%rcx\n\t" \
1840  "movq 24(%%rax), %%rdx\n\t" \
1841  "movq 16(%%rax), %%rsi\n\t" \
1842  "movq 8(%%rax), %%rdi\n\t" \
1843  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1844  VALGRIND_CALL_NOREDIR_RAX \
1845  VALGRIND_RESTORE_STACK \
1846  VALGRIND_CFI_EPILOGUE \
1847  : /*out*/ "=a" (_res) \
1848  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1849  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1850  ); \
1851  lval = (__typeof__(lval)) _res; \
1852  } while (0)
1853 
/* Call a 9-arg word function (amd64): args 1-6 in registers, args 7-9
   pushed in reverse.  Odd push count -> $136 drop keeps 16-byte
   alignment.  NOTE(review): asm left byte-identical; comment added only. */
1854 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1855  arg7,arg8,arg9) \
1856  do { \
1857  volatile OrigFn _orig = (orig); \
1858  volatile unsigned long _argvec[10]; \
1859  volatile unsigned long _res; \
1860  _argvec[0] = (unsigned long)_orig.nraddr; \
1861  _argvec[1] = (unsigned long)(arg1); \
1862  _argvec[2] = (unsigned long)(arg2); \
1863  _argvec[3] = (unsigned long)(arg3); \
1864  _argvec[4] = (unsigned long)(arg4); \
1865  _argvec[5] = (unsigned long)(arg5); \
1866  _argvec[6] = (unsigned long)(arg6); \
1867  _argvec[7] = (unsigned long)(arg7); \
1868  _argvec[8] = (unsigned long)(arg8); \
1869  _argvec[9] = (unsigned long)(arg9); \
1870  __asm__ volatile( \
1871  VALGRIND_CFI_PROLOGUE \
1872  VALGRIND_ALIGN_STACK \
1873  "subq $136,%%rsp\n\t" \
1874  "pushq 72(%%rax)\n\t" \
1875  "pushq 64(%%rax)\n\t" \
1876  "pushq 56(%%rax)\n\t" \
1877  "movq 48(%%rax), %%r9\n\t" \
1878  "movq 40(%%rax), %%r8\n\t" \
1879  "movq 32(%%rax), %%rcx\n\t" \
1880  "movq 24(%%rax), %%rdx\n\t" \
1881  "movq 16(%%rax), %%rsi\n\t" \
1882  "movq 8(%%rax), %%rdi\n\t" \
1883  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1884  VALGRIND_CALL_NOREDIR_RAX \
1885  VALGRIND_RESTORE_STACK \
1886  VALGRIND_CFI_EPILOGUE \
1887  : /*out*/ "=a" (_res) \
1888  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1889  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1890  ); \
1891  lval = (__typeof__(lval)) _res; \
1892  } while (0)
1893 
/* Call a 10-arg word function (amd64): args 1-6 in registers, args 7-10
   pushed in reverse.  Even push count -> plain $128 drop.
   NOTE(review): asm left byte-identical; comment added only. */
1894 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1895  arg7,arg8,arg9,arg10) \
1896  do { \
1897  volatile OrigFn _orig = (orig); \
1898  volatile unsigned long _argvec[11]; \
1899  volatile unsigned long _res; \
1900  _argvec[0] = (unsigned long)_orig.nraddr; \
1901  _argvec[1] = (unsigned long)(arg1); \
1902  _argvec[2] = (unsigned long)(arg2); \
1903  _argvec[3] = (unsigned long)(arg3); \
1904  _argvec[4] = (unsigned long)(arg4); \
1905  _argvec[5] = (unsigned long)(arg5); \
1906  _argvec[6] = (unsigned long)(arg6); \
1907  _argvec[7] = (unsigned long)(arg7); \
1908  _argvec[8] = (unsigned long)(arg8); \
1909  _argvec[9] = (unsigned long)(arg9); \
1910  _argvec[10] = (unsigned long)(arg10); \
1911  __asm__ volatile( \
1912  VALGRIND_CFI_PROLOGUE \
1913  VALGRIND_ALIGN_STACK \
1914  "subq $128,%%rsp\n\t" \
1915  "pushq 80(%%rax)\n\t" \
1916  "pushq 72(%%rax)\n\t" \
1917  "pushq 64(%%rax)\n\t" \
1918  "pushq 56(%%rax)\n\t" \
1919  "movq 48(%%rax), %%r9\n\t" \
1920  "movq 40(%%rax), %%r8\n\t" \
1921  "movq 32(%%rax), %%rcx\n\t" \
1922  "movq 24(%%rax), %%rdx\n\t" \
1923  "movq 16(%%rax), %%rsi\n\t" \
1924  "movq 8(%%rax), %%rdi\n\t" \
1925  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1926  VALGRIND_CALL_NOREDIR_RAX \
1927  VALGRIND_RESTORE_STACK \
1928  VALGRIND_CFI_EPILOGUE \
1929  : /*out*/ "=a" (_res) \
1930  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1931  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1932  ); \
1933  lval = (__typeof__(lval)) _res; \
1934  } while (0)
1935 
/* Call an 11-arg word function (amd64): args 1-6 in registers, args
   7-11 pushed in reverse.  Odd push count -> $136 drop keeps alignment.
   NOTE(review): asm left byte-identical; comment added only. */
1936 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1937  arg7,arg8,arg9,arg10,arg11) \
1938  do { \
1939  volatile OrigFn _orig = (orig); \
1940  volatile unsigned long _argvec[12]; \
1941  volatile unsigned long _res; \
1942  _argvec[0] = (unsigned long)_orig.nraddr; \
1943  _argvec[1] = (unsigned long)(arg1); \
1944  _argvec[2] = (unsigned long)(arg2); \
1945  _argvec[3] = (unsigned long)(arg3); \
1946  _argvec[4] = (unsigned long)(arg4); \
1947  _argvec[5] = (unsigned long)(arg5); \
1948  _argvec[6] = (unsigned long)(arg6); \
1949  _argvec[7] = (unsigned long)(arg7); \
1950  _argvec[8] = (unsigned long)(arg8); \
1951  _argvec[9] = (unsigned long)(arg9); \
1952  _argvec[10] = (unsigned long)(arg10); \
1953  _argvec[11] = (unsigned long)(arg11); \
1954  __asm__ volatile( \
1955  VALGRIND_CFI_PROLOGUE \
1956  VALGRIND_ALIGN_STACK \
1957  "subq $136,%%rsp\n\t" \
1958  "pushq 88(%%rax)\n\t" \
1959  "pushq 80(%%rax)\n\t" \
1960  "pushq 72(%%rax)\n\t" \
1961  "pushq 64(%%rax)\n\t" \
1962  "pushq 56(%%rax)\n\t" \
1963  "movq 48(%%rax), %%r9\n\t" \
1964  "movq 40(%%rax), %%r8\n\t" \
1965  "movq 32(%%rax), %%rcx\n\t" \
1966  "movq 24(%%rax), %%rdx\n\t" \
1967  "movq 16(%%rax), %%rsi\n\t" \
1968  "movq 8(%%rax), %%rdi\n\t" \
1969  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1970  VALGRIND_CALL_NOREDIR_RAX \
1971  VALGRIND_RESTORE_STACK \
1972  VALGRIND_CFI_EPILOGUE \
1973  : /*out*/ "=a" (_res) \
1974  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1975  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1976  ); \
1977  lval = (__typeof__(lval)) _res; \
1978  } while (0)
1979 
/* Call a 12-arg word function (amd64): args 1-6 in registers, args
   7-12 pushed in reverse.  Even push count -> plain $128 drop.
   NOTE(review): asm left byte-identical; comment added only. */
1980 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1981  arg7,arg8,arg9,arg10,arg11,arg12) \
1982  do { \
1983  volatile OrigFn _orig = (orig); \
1984  volatile unsigned long _argvec[13]; \
1985  volatile unsigned long _res; \
1986  _argvec[0] = (unsigned long)_orig.nraddr; \
1987  _argvec[1] = (unsigned long)(arg1); \
1988  _argvec[2] = (unsigned long)(arg2); \
1989  _argvec[3] = (unsigned long)(arg3); \
1990  _argvec[4] = (unsigned long)(arg4); \
1991  _argvec[5] = (unsigned long)(arg5); \
1992  _argvec[6] = (unsigned long)(arg6); \
1993  _argvec[7] = (unsigned long)(arg7); \
1994  _argvec[8] = (unsigned long)(arg8); \
1995  _argvec[9] = (unsigned long)(arg9); \
1996  _argvec[10] = (unsigned long)(arg10); \
1997  _argvec[11] = (unsigned long)(arg11); \
1998  _argvec[12] = (unsigned long)(arg12); \
1999  __asm__ volatile( \
2000  VALGRIND_CFI_PROLOGUE \
2001  VALGRIND_ALIGN_STACK \
2002  "subq $128,%%rsp\n\t" \
2003  "pushq 96(%%rax)\n\t" \
2004  "pushq 88(%%rax)\n\t" \
2005  "pushq 80(%%rax)\n\t" \
2006  "pushq 72(%%rax)\n\t" \
2007  "pushq 64(%%rax)\n\t" \
2008  "pushq 56(%%rax)\n\t" \
2009  "movq 48(%%rax), %%r9\n\t" \
2010  "movq 40(%%rax), %%r8\n\t" \
2011  "movq 32(%%rax), %%rcx\n\t" \
2012  "movq 24(%%rax), %%rdx\n\t" \
2013  "movq 16(%%rax), %%rsi\n\t" \
2014  "movq 8(%%rax), %%rdi\n\t" \
2015  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2016  VALGRIND_CALL_NOREDIR_RAX \
2017  VALGRIND_RESTORE_STACK \
2018  VALGRIND_CFI_EPILOGUE \
2019  : /*out*/ "=a" (_res) \
2020  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2021  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2022  ); \
2023  lval = (__typeof__(lval)) _res; \
2024  } while (0)
2025 
2026 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
2027 
2028 /* ------------------------ ppc32-linux ------------------------ */
2029 
2030 #if defined(PLAT_ppc32_linux)
2031 
2032 /* This is useful for finding out about the on-stack stuff:
2033 
2034  extern int f9 ( int,int,int,int,int,int,int,int,int );
2035  extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2036  extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2037  extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2038 
2039  int g9 ( void ) {
2040  return f9(11,22,33,44,55,66,77,88,99);
2041  }
2042  int g10 ( void ) {
2043  return f10(11,22,33,44,55,66,77,88,99,110);
2044  }
2045  int g11 ( void ) {
2046  return f11(11,22,33,44,55,66,77,88,99,110,121);
2047  }
2048  int g12 ( void ) {
2049  return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2050  }
2051 */
2052 
2053 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2054 
2055 /* These regs are trashed by the hidden call. */
/* Registers clobbered by the hidden ppc32 call, used as the asm
   clobber list below: link/count/fixed-point-exception regs, all CR
   fields, and the volatile GPRs r0,r2-r13.
   NOTE(review): code left byte-identical; comment added only. */
2056 #define __CALLER_SAVED_REGS \
2057  "lr", "ctr", "xer", \
2058  "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2059  "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2060  "r11", "r12", "r13"
2061 
2062 /* Macros to save and align the stack before making a function
2063  call and restore it afterwards as gcc may not keep the stack
2064  pointer aligned if it doesn't realise calls are being made
2065  to other functions. */
2066 
/* Save sp (r1) in r28, then clear its low 4 bits (rlwinm keeps bits
   0..27) to force 16-byte alignment.  r28 is listed as a clobber in
   every CALL_FN_ below.  NOTE(review): comment added only. */
2067 #define VALGRIND_ALIGN_STACK \
2068  "mr 28,1\n\t" \
2069  "rlwinm 1,1,0,0,27\n\t"
/* Restore the pre-alignment sp saved in r28 by VALGRIND_ALIGN_STACK. */
2070 #define VALGRIND_RESTORE_STACK \
2071  "mr 1,28\n\t"
2072 
2073 /* These CALL_FN_ macros assume that on ppc32-linux,
2074  sizeof(unsigned long) == 4. */
2075 
/* Call a 0-arg word function (ppc32): r11 gets &_argvec, the target
   address is loaded from _argvec[0] into r11, and the result comes
   back in r3.  NOTE(review): asm left byte-identical; comment only. */
2076 #define CALL_FN_W_v(lval, orig) \
2077  do { \
2078  volatile OrigFn _orig = (orig); \
2079  volatile unsigned long _argvec[1]; \
2080  volatile unsigned long _res; \
2081  _argvec[0] = (unsigned long)_orig.nraddr; \
2082  __asm__ volatile( \
2083  VALGRIND_ALIGN_STACK \
2084  "mr 11,%1\n\t" \
2085  "lwz 11,0(11)\n\t" /* target->r11 */ \
2086  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2087  VALGRIND_RESTORE_STACK \
2088  "mr %0,3" \
2089  : /*out*/ "=r" (_res) \
2090  : /*in*/ "r" (&_argvec[0]) \
2091  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2092  ); \
2093  lval = (__typeof__(lval)) _res; \
2094  } while (0)
2095 
/* Call a 1-arg word function (ppc32): arg1 -> r3 (first ABI arg reg),
   target -> r11, result read back from r3.
   NOTE(review): asm left byte-identical; comment added only. */
2096 #define CALL_FN_W_W(lval, orig, arg1) \
2097  do { \
2098  volatile OrigFn _orig = (orig); \
2099  volatile unsigned long _argvec[2]; \
2100  volatile unsigned long _res; \
2101  _argvec[0] = (unsigned long)_orig.nraddr; \
2102  _argvec[1] = (unsigned long)arg1; \
2103  __asm__ volatile( \
2104  VALGRIND_ALIGN_STACK \
2105  "mr 11,%1\n\t" \
2106  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2107  "lwz 11,0(11)\n\t" /* target->r11 */ \
2108  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2109  VALGRIND_RESTORE_STACK \
2110  "mr %0,3" \
2111  : /*out*/ "=r" (_res) \
2112  : /*in*/ "r" (&_argvec[0]) \
2113  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2114  ); \
2115  lval = (__typeof__(lval)) _res; \
2116  } while (0)
2117 
/* Call a 2-arg word function (ppc32): args -> r3,r4; target -> r11.
   NOTE(review): asm left byte-identical; comment added only. */
2118 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2119  do { \
2120  volatile OrigFn _orig = (orig); \
2121  volatile unsigned long _argvec[3]; \
2122  volatile unsigned long _res; \
2123  _argvec[0] = (unsigned long)_orig.nraddr; \
2124  _argvec[1] = (unsigned long)arg1; \
2125  _argvec[2] = (unsigned long)arg2; \
2126  __asm__ volatile( \
2127  VALGRIND_ALIGN_STACK \
2128  "mr 11,%1\n\t" \
2129  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2130  "lwz 4,8(11)\n\t" \
2131  "lwz 11,0(11)\n\t" /* target->r11 */ \
2132  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2133  VALGRIND_RESTORE_STACK \
2134  "mr %0,3" \
2135  : /*out*/ "=r" (_res) \
2136  : /*in*/ "r" (&_argvec[0]) \
2137  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2138  ); \
2139  lval = (__typeof__(lval)) _res; \
2140  } while (0)
2141 
/* Call a 3-arg word function (ppc32): args -> r3,r4,r5; target -> r11.
   NOTE(review): asm left byte-identical; comment added only. */
2142 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2143  do { \
2144  volatile OrigFn _orig = (orig); \
2145  volatile unsigned long _argvec[4]; \
2146  volatile unsigned long _res; \
2147  _argvec[0] = (unsigned long)_orig.nraddr; \
2148  _argvec[1] = (unsigned long)arg1; \
2149  _argvec[2] = (unsigned long)arg2; \
2150  _argvec[3] = (unsigned long)arg3; \
2151  __asm__ volatile( \
2152  VALGRIND_ALIGN_STACK \
2153  "mr 11,%1\n\t" \
2154  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2155  "lwz 4,8(11)\n\t" \
2156  "lwz 5,12(11)\n\t" \
2157  "lwz 11,0(11)\n\t" /* target->r11 */ \
2158  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2159  VALGRIND_RESTORE_STACK \
2160  "mr %0,3" \
2161  : /*out*/ "=r" (_res) \
2162  : /*in*/ "r" (&_argvec[0]) \
2163  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2164  ); \
2165  lval = (__typeof__(lval)) _res; \
2166  } while (0)
2167 
/* Call a 4-arg word function (ppc32): args -> r3..r6; target -> r11.
   NOTE(review): asm left byte-identical; comment added only. */
2168 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2169  do { \
2170  volatile OrigFn _orig = (orig); \
2171  volatile unsigned long _argvec[5]; \
2172  volatile unsigned long _res; \
2173  _argvec[0] = (unsigned long)_orig.nraddr; \
2174  _argvec[1] = (unsigned long)arg1; \
2175  _argvec[2] = (unsigned long)arg2; \
2176  _argvec[3] = (unsigned long)arg3; \
2177  _argvec[4] = (unsigned long)arg4; \
2178  __asm__ volatile( \
2179  VALGRIND_ALIGN_STACK \
2180  "mr 11,%1\n\t" \
2181  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2182  "lwz 4,8(11)\n\t" \
2183  "lwz 5,12(11)\n\t" \
2184  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2185  "lwz 11,0(11)\n\t" /* target->r11 */ \
2186  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2187  VALGRIND_RESTORE_STACK \
2188  "mr %0,3" \
2189  : /*out*/ "=r" (_res) \
2190  : /*in*/ "r" (&_argvec[0]) \
2191  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2192  ); \
2193  lval = (__typeof__(lval)) _res; \
2194  } while (0)
2195 
/* Call a 5-arg word function (ppc32): args -> r3..r7; target -> r11.
   NOTE(review): asm left byte-identical; comment added only. */
2196 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2197  do { \
2198  volatile OrigFn _orig = (orig); \
2199  volatile unsigned long _argvec[6]; \
2200  volatile unsigned long _res; \
2201  _argvec[0] = (unsigned long)_orig.nraddr; \
2202  _argvec[1] = (unsigned long)arg1; \
2203  _argvec[2] = (unsigned long)arg2; \
2204  _argvec[3] = (unsigned long)arg3; \
2205  _argvec[4] = (unsigned long)arg4; \
2206  _argvec[5] = (unsigned long)arg5; \
2207  __asm__ volatile( \
2208  VALGRIND_ALIGN_STACK \
2209  "mr 11,%1\n\t" \
2210  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2211  "lwz 4,8(11)\n\t" \
2212  "lwz 5,12(11)\n\t" \
2213  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2214  "lwz 7,20(11)\n\t" \
2215  "lwz 11,0(11)\n\t" /* target->r11 */ \
2216  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2217  VALGRIND_RESTORE_STACK \
2218  "mr %0,3" \
2219  : /*out*/ "=r" (_res) \
2220  : /*in*/ "r" (&_argvec[0]) \
2221  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2222  ); \
2223  lval = (__typeof__(lval)) _res; \
2224  } while (0)
2225 
/* Call a 6-arg word function (ppc32): args -> r3..r8; target -> r11.
   NOTE(review): asm left byte-identical; comment added only. */
2226 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2227  do { \
2228  volatile OrigFn _orig = (orig); \
2229  volatile unsigned long _argvec[7]; \
2230  volatile unsigned long _res; \
2231  _argvec[0] = (unsigned long)_orig.nraddr; \
2232  _argvec[1] = (unsigned long)arg1; \
2233  _argvec[2] = (unsigned long)arg2; \
2234  _argvec[3] = (unsigned long)arg3; \
2235  _argvec[4] = (unsigned long)arg4; \
2236  _argvec[5] = (unsigned long)arg5; \
2237  _argvec[6] = (unsigned long)arg6; \
2238  __asm__ volatile( \
2239  VALGRIND_ALIGN_STACK \
2240  "mr 11,%1\n\t" \
2241  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2242  "lwz 4,8(11)\n\t" \
2243  "lwz 5,12(11)\n\t" \
2244  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2245  "lwz 7,20(11)\n\t" \
2246  "lwz 8,24(11)\n\t" \
2247  "lwz 11,0(11)\n\t" /* target->r11 */ \
2248  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2249  VALGRIND_RESTORE_STACK \
2250  "mr %0,3" \
2251  : /*out*/ "=r" (_res) \
2252  : /*in*/ "r" (&_argvec[0]) \
2253  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2254  ); \
2255  lval = (__typeof__(lval)) _res; \
2256  } while (0)
2257 
/* Call a 7-arg word function (ppc32): args -> r3..r9; target -> r11.
   NOTE(review): asm left byte-identical; comment added only. */
2258 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2259  arg7) \
2260  do { \
2261  volatile OrigFn _orig = (orig); \
2262  volatile unsigned long _argvec[8]; \
2263  volatile unsigned long _res; \
2264  _argvec[0] = (unsigned long)_orig.nraddr; \
2265  _argvec[1] = (unsigned long)arg1; \
2266  _argvec[2] = (unsigned long)arg2; \
2267  _argvec[3] = (unsigned long)arg3; \
2268  _argvec[4] = (unsigned long)arg4; \
2269  _argvec[5] = (unsigned long)arg5; \
2270  _argvec[6] = (unsigned long)arg6; \
2271  _argvec[7] = (unsigned long)arg7; \
2272  __asm__ volatile( \
2273  VALGRIND_ALIGN_STACK \
2274  "mr 11,%1\n\t" \
2275  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2276  "lwz 4,8(11)\n\t" \
2277  "lwz 5,12(11)\n\t" \
2278  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2279  "lwz 7,20(11)\n\t" \
2280  "lwz 8,24(11)\n\t" \
2281  "lwz 9,28(11)\n\t" \
2282  "lwz 11,0(11)\n\t" /* target->r11 */ \
2283  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2284  VALGRIND_RESTORE_STACK \
2285  "mr %0,3" \
2286  : /*out*/ "=r" (_res) \
2287  : /*in*/ "r" (&_argvec[0]) \
2288  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2289  ); \
2290  lval = (__typeof__(lval)) _res; \
2291  } while (0)
2292 
/* Call an 8-arg word function (ppc32): args -> r3..r10 (all eight ABI
   arg registers); target -> r11.
   NOTE(review): asm left byte-identical; comment added only. */
2293 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2294  arg7,arg8) \
2295  do { \
2296  volatile OrigFn _orig = (orig); \
2297  volatile unsigned long _argvec[9]; \
2298  volatile unsigned long _res; \
2299  _argvec[0] = (unsigned long)_orig.nraddr; \
2300  _argvec[1] = (unsigned long)arg1; \
2301  _argvec[2] = (unsigned long)arg2; \
2302  _argvec[3] = (unsigned long)arg3; \
2303  _argvec[4] = (unsigned long)arg4; \
2304  _argvec[5] = (unsigned long)arg5; \
2305  _argvec[6] = (unsigned long)arg6; \
2306  _argvec[7] = (unsigned long)arg7; \
2307  _argvec[8] = (unsigned long)arg8; \
2308  __asm__ volatile( \
2309  VALGRIND_ALIGN_STACK \
2310  "mr 11,%1\n\t" \
2311  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2312  "lwz 4,8(11)\n\t" \
2313  "lwz 5,12(11)\n\t" \
2314  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2315  "lwz 7,20(11)\n\t" \
2316  "lwz 8,24(11)\n\t" \
2317  "lwz 9,28(11)\n\t" \
2318  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2319  "lwz 11,0(11)\n\t" /* target->r11 */ \
2320  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2321  VALGRIND_RESTORE_STACK \
2322  "mr %0,3" \
2323  : /*out*/ "=r" (_res) \
2324  : /*in*/ "r" (&_argvec[0]) \
2325  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2326  ); \
2327  lval = (__typeof__(lval)) _res; \
2328  } while (0)
2329 
/* Call a 9-arg word function (ppc32): args 1-8 -> r3..r10; arg9 is
   spilled to the stack at 8(r1) after a 16-byte sp drop (r3 is used
   as a scratch register for the spill before being loaded last).
   NOTE(review): asm left byte-identical; comment added only. */
2330 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2331  arg7,arg8,arg9) \
2332  do { \
2333  volatile OrigFn _orig = (orig); \
2334  volatile unsigned long _argvec[10]; \
2335  volatile unsigned long _res; \
2336  _argvec[0] = (unsigned long)_orig.nraddr; \
2337  _argvec[1] = (unsigned long)arg1; \
2338  _argvec[2] = (unsigned long)arg2; \
2339  _argvec[3] = (unsigned long)arg3; \
2340  _argvec[4] = (unsigned long)arg4; \
2341  _argvec[5] = (unsigned long)arg5; \
2342  _argvec[6] = (unsigned long)arg6; \
2343  _argvec[7] = (unsigned long)arg7; \
2344  _argvec[8] = (unsigned long)arg8; \
2345  _argvec[9] = (unsigned long)arg9; \
2346  __asm__ volatile( \
2347  VALGRIND_ALIGN_STACK \
2348  "mr 11,%1\n\t" \
2349  "addi 1,1,-16\n\t" \
2350  /* arg9 */ \
2351  "lwz 3,36(11)\n\t" \
2352  "stw 3,8(1)\n\t" \
2353  /* args1-8 */ \
2354  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2355  "lwz 4,8(11)\n\t" \
2356  "lwz 5,12(11)\n\t" \
2357  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2358  "lwz 7,20(11)\n\t" \
2359  "lwz 8,24(11)\n\t" \
2360  "lwz 9,28(11)\n\t" \
2361  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2362  "lwz 11,0(11)\n\t" /* target->r11 */ \
2363  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2364  VALGRIND_RESTORE_STACK \
2365  "mr %0,3" \
2366  : /*out*/ "=r" (_res) \
2367  : /*in*/ "r" (&_argvec[0]) \
2368  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2369  ); \
2370  lval = (__typeof__(lval)) _res; \
2371  } while (0)
2372 
/* Call a 10-arg word function (ppc32): args 1-8 -> r3..r10; args 9-10
   spilled to 8(r1)/12(r1) after a 16-byte sp drop.
   NOTE(review): asm left byte-identical; comment added only. */
2373 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2374  arg7,arg8,arg9,arg10) \
2375  do { \
2376  volatile OrigFn _orig = (orig); \
2377  volatile unsigned long _argvec[11]; \
2378  volatile unsigned long _res; \
2379  _argvec[0] = (unsigned long)_orig.nraddr; \
2380  _argvec[1] = (unsigned long)arg1; \
2381  _argvec[2] = (unsigned long)arg2; \
2382  _argvec[3] = (unsigned long)arg3; \
2383  _argvec[4] = (unsigned long)arg4; \
2384  _argvec[5] = (unsigned long)arg5; \
2385  _argvec[6] = (unsigned long)arg6; \
2386  _argvec[7] = (unsigned long)arg7; \
2387  _argvec[8] = (unsigned long)arg8; \
2388  _argvec[9] = (unsigned long)arg9; \
2389  _argvec[10] = (unsigned long)arg10; \
2390  __asm__ volatile( \
2391  VALGRIND_ALIGN_STACK \
2392  "mr 11,%1\n\t" \
2393  "addi 1,1,-16\n\t" \
2394  /* arg10 */ \
2395  "lwz 3,40(11)\n\t" \
2396  "stw 3,12(1)\n\t" \
2397  /* arg9 */ \
2398  "lwz 3,36(11)\n\t" \
2399  "stw 3,8(1)\n\t" \
2400  /* args1-8 */ \
2401  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2402  "lwz 4,8(11)\n\t" \
2403  "lwz 5,12(11)\n\t" \
2404  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2405  "lwz 7,20(11)\n\t" \
2406  "lwz 8,24(11)\n\t" \
2407  "lwz 9,28(11)\n\t" \
2408  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2409  "lwz 11,0(11)\n\t" /* target->r11 */ \
2410  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2411  VALGRIND_RESTORE_STACK \
2412  "mr %0,3" \
2413  : /*out*/ "=r" (_res) \
2414  : /*in*/ "r" (&_argvec[0]) \
2415  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2416  ); \
2417  lval = (__typeof__(lval)) _res; \
2418  } while (0)
2419 
/* Call an 11-arg word function (ppc32): args 1-8 -> r3..r10; args 9-11
   spilled to 8/12/16(r1) after a 32-byte sp drop.
   NOTE(review): asm left byte-identical; comment added only. */
2420 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2421  arg7,arg8,arg9,arg10,arg11) \
2422  do { \
2423  volatile OrigFn _orig = (orig); \
2424  volatile unsigned long _argvec[12]; \
2425  volatile unsigned long _res; \
2426  _argvec[0] = (unsigned long)_orig.nraddr; \
2427  _argvec[1] = (unsigned long)arg1; \
2428  _argvec[2] = (unsigned long)arg2; \
2429  _argvec[3] = (unsigned long)arg3; \
2430  _argvec[4] = (unsigned long)arg4; \
2431  _argvec[5] = (unsigned long)arg5; \
2432  _argvec[6] = (unsigned long)arg6; \
2433  _argvec[7] = (unsigned long)arg7; \
2434  _argvec[8] = (unsigned long)arg8; \
2435  _argvec[9] = (unsigned long)arg9; \
2436  _argvec[10] = (unsigned long)arg10; \
2437  _argvec[11] = (unsigned long)arg11; \
2438  __asm__ volatile( \
2439  VALGRIND_ALIGN_STACK \
2440  "mr 11,%1\n\t" \
2441  "addi 1,1,-32\n\t" \
2442  /* arg11 */ \
2443  "lwz 3,44(11)\n\t" \
2444  "stw 3,16(1)\n\t" \
2445  /* arg10 */ \
2446  "lwz 3,40(11)\n\t" \
2447  "stw 3,12(1)\n\t" \
2448  /* arg9 */ \
2449  "lwz 3,36(11)\n\t" \
2450  "stw 3,8(1)\n\t" \
2451  /* args1-8 */ \
2452  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2453  "lwz 4,8(11)\n\t" \
2454  "lwz 5,12(11)\n\t" \
2455  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2456  "lwz 7,20(11)\n\t" \
2457  "lwz 8,24(11)\n\t" \
2458  "lwz 9,28(11)\n\t" \
2459  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2460  "lwz 11,0(11)\n\t" /* target->r11 */ \
2461  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2462  VALGRIND_RESTORE_STACK \
2463  "mr %0,3" \
2464  : /*out*/ "=r" (_res) \
2465  : /*in*/ "r" (&_argvec[0]) \
2466  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2467  ); \
2468  lval = (__typeof__(lval)) _res; \
2469  } while (0)
2470 
/* Call a 12-arg word function (ppc32): args 1-8 -> r3..r10; args 9-12
   spilled to 8/12/16/20(r1) after a 32-byte sp drop.
   NOTE(review): asm left byte-identical; comment added only. */
2471 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2472  arg7,arg8,arg9,arg10,arg11,arg12) \
2473  do { \
2474  volatile OrigFn _orig = (orig); \
2475  volatile unsigned long _argvec[13]; \
2476  volatile unsigned long _res; \
2477  _argvec[0] = (unsigned long)_orig.nraddr; \
2478  _argvec[1] = (unsigned long)arg1; \
2479  _argvec[2] = (unsigned long)arg2; \
2480  _argvec[3] = (unsigned long)arg3; \
2481  _argvec[4] = (unsigned long)arg4; \
2482  _argvec[5] = (unsigned long)arg5; \
2483  _argvec[6] = (unsigned long)arg6; \
2484  _argvec[7] = (unsigned long)arg7; \
2485  _argvec[8] = (unsigned long)arg8; \
2486  _argvec[9] = (unsigned long)arg9; \
2487  _argvec[10] = (unsigned long)arg10; \
2488  _argvec[11] = (unsigned long)arg11; \
2489  _argvec[12] = (unsigned long)arg12; \
2490  __asm__ volatile( \
2491  VALGRIND_ALIGN_STACK \
2492  "mr 11,%1\n\t" \
2493  "addi 1,1,-32\n\t" \
2494  /* arg12 */ \
2495  "lwz 3,48(11)\n\t" \
2496  "stw 3,20(1)\n\t" \
2497  /* arg11 */ \
2498  "lwz 3,44(11)\n\t" \
2499  "stw 3,16(1)\n\t" \
2500  /* arg10 */ \
2501  "lwz 3,40(11)\n\t" \
2502  "stw 3,12(1)\n\t" \
2503  /* arg9 */ \
2504  "lwz 3,36(11)\n\t" \
2505  "stw 3,8(1)\n\t" \
2506  /* args1-8 */ \
2507  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2508  "lwz 4,8(11)\n\t" \
2509  "lwz 5,12(11)\n\t" \
2510  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2511  "lwz 7,20(11)\n\t" \
2512  "lwz 8,24(11)\n\t" \
2513  "lwz 9,28(11)\n\t" \
2514  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2515  "lwz 11,0(11)\n\t" /* target->r11 */ \
2516  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2517  VALGRIND_RESTORE_STACK \
2518  "mr %0,3" \
2519  : /*out*/ "=r" (_res) \
2520  : /*in*/ "r" (&_argvec[0]) \
2521  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2522  ); \
2523  lval = (__typeof__(lval)) _res; \
2524  } while (0)
2525 
2526 #endif /* PLAT_ppc32_linux */
2527 
2528 /* ------------------------ ppc64-linux ------------------------ */
2529 
2530 #if defined(PLAT_ppc64_linux)
2531 
2532 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2533 
2534 /* These regs are trashed by the hidden call. */
/* Registers clobbered by the hidden ppc64 call (same set as ppc32):
   lr/ctr/xer, all CR fields, and volatile GPRs r0,r2-r13.
   NOTE(review): code left byte-identical; comment added only. */
2535 #define __CALLER_SAVED_REGS \
2536  "lr", "ctr", "xer", \
2537  "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2538  "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2539  "r11", "r12", "r13"
2540 
2541 /* Macros to save and align the stack before making a function
2542  call and restore it afterwards as gcc may not keep the stack
2543  pointer aligned if it doesn't realise calls are being made
2544  to other functions. */
2545 
/* Save sp (r1) in r28, then clear its low 4 bits (rldicr keeps bits
   0..59) to force 16-byte alignment on ppc64. */
2546 #define VALGRIND_ALIGN_STACK \
2547  "mr 28,1\n\t" \
2548  "rldicr 1,1,0,59\n\t"
/* Restore the pre-alignment sp saved in r28 by VALGRIND_ALIGN_STACK. */
2549 #define VALGRIND_RESTORE_STACK \
2550  "mr 1,28\n\t"
2551 
2552 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2553  long) == 8. */
2554 
/* Call a 0-arg word function (ppc64): the caller's TOC pointer (r2) is
   saved below _argvec[2] via -16(r11), the callee's TOC from _orig.r2
   is installed for the call, and r2 is restored afterwards.  Note the
   asm "in" operand is &_argvec[2], so offsets -16/-8/0 address
   _argvec[0]/[1]/[2].  NOTE(review): asm byte-identical; comment only. */
2555 #define CALL_FN_W_v(lval, orig) \
2556  do { \
2557  volatile OrigFn _orig = (orig); \
2558  volatile unsigned long _argvec[3+0]; \
2559  volatile unsigned long _res; \
2560  /* _argvec[0] holds current r2 across the call */ \
2561  _argvec[1] = (unsigned long)_orig.r2; \
2562  _argvec[2] = (unsigned long)_orig.nraddr; \
2563  __asm__ volatile( \
2564  VALGRIND_ALIGN_STACK \
2565  "mr 11,%1\n\t" \
2566  "std 2,-16(11)\n\t" /* save tocptr */ \
2567  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2568  "ld 11, 0(11)\n\t" /* target->r11 */ \
2569  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2570  "mr 11,%1\n\t" \
2571  "mr %0,3\n\t" \
2572  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2573  VALGRIND_RESTORE_STACK \
2574  : /*out*/ "=r" (_res) \
2575  : /*in*/ "r" (&_argvec[2]) \
2576  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2577  ); \
2578  lval = (__typeof__(lval)) _res; \
2579  } while (0)
2580 
/* Call a 1-arg word function (ppc64): arg1 -> r3; TOC (r2) saved,
   swapped to the callee's, and restored around the call.
   NOTE(review): asm left byte-identical; comment added only. */
2581 #define CALL_FN_W_W(lval, orig, arg1) \
2582  do { \
2583  volatile OrigFn _orig = (orig); \
2584  volatile unsigned long _argvec[3+1]; \
2585  volatile unsigned long _res; \
2586  /* _argvec[0] holds current r2 across the call */ \
2587  _argvec[1] = (unsigned long)_orig.r2; \
2588  _argvec[2] = (unsigned long)_orig.nraddr; \
2589  _argvec[2+1] = (unsigned long)arg1; \
2590  __asm__ volatile( \
2591  VALGRIND_ALIGN_STACK \
2592  "mr 11,%1\n\t" \
2593  "std 2,-16(11)\n\t" /* save tocptr */ \
2594  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2595  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2596  "ld 11, 0(11)\n\t" /* target->r11 */ \
2597  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2598  "mr 11,%1\n\t" \
2599  "mr %0,3\n\t" \
2600  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2601  VALGRIND_RESTORE_STACK \
2602  : /*out*/ "=r" (_res) \
2603  : /*in*/ "r" (&_argvec[2]) \
2604  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2605  ); \
2606  lval = (__typeof__(lval)) _res; \
2607  } while (0)
2608 
/* Call a 2-arg word function (ppc64): args -> r3,r4; TOC save/swap/
   restore as in CALL_FN_W_v.
   NOTE(review): asm left byte-identical; comment added only. */
2609 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2610  do { \
2611  volatile OrigFn _orig = (orig); \
2612  volatile unsigned long _argvec[3+2]; \
2613  volatile unsigned long _res; \
2614  /* _argvec[0] holds current r2 across the call */ \
2615  _argvec[1] = (unsigned long)_orig.r2; \
2616  _argvec[2] = (unsigned long)_orig.nraddr; \
2617  _argvec[2+1] = (unsigned long)arg1; \
2618  _argvec[2+2] = (unsigned long)arg2; \
2619  __asm__ volatile( \
2620  VALGRIND_ALIGN_STACK \
2621  "mr 11,%1\n\t" \
2622  "std 2,-16(11)\n\t" /* save tocptr */ \
2623  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2624  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2625  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2626  "ld 11, 0(11)\n\t" /* target->r11 */ \
2627  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2628  "mr 11,%1\n\t" \
2629  "mr %0,3\n\t" \
2630  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2631  VALGRIND_RESTORE_STACK \
2632  : /*out*/ "=r" (_res) \
2633  : /*in*/ "r" (&_argvec[2]) \
2634  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2635  ); \
2636  lval = (__typeof__(lval)) _res; \
2637  } while (0)
2638 
/* Call a 3-arg word function (ppc64): args -> r3,r4,r5; TOC save/swap/
   restore as in CALL_FN_W_v.
   NOTE(review): asm left byte-identical; comment added only. */
2639 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2640  do { \
2641  volatile OrigFn _orig = (orig); \
2642  volatile unsigned long _argvec[3+3]; \
2643  volatile unsigned long _res; \
2644  /* _argvec[0] holds current r2 across the call */ \
2645  _argvec[1] = (unsigned long)_orig.r2; \
2646  _argvec[2] = (unsigned long)_orig.nraddr; \
2647  _argvec[2+1] = (unsigned long)arg1; \
2648  _argvec[2+2] = (unsigned long)arg2; \
2649  _argvec[2+3] = (unsigned long)arg3; \
2650  __asm__ volatile( \
2651  VALGRIND_ALIGN_STACK \
2652  "mr 11,%1\n\t" \
2653  "std 2,-16(11)\n\t" /* save tocptr */ \
2654  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2655  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2656  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2657  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2658  "ld 11, 0(11)\n\t" /* target->r11 */ \
2659  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2660  "mr 11,%1\n\t" \
2661  "mr %0,3\n\t" \
2662  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2663  VALGRIND_RESTORE_STACK \
2664  : /*out*/ "=r" (_res) \
2665  : /*in*/ "r" (&_argvec[2]) \
2666  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2667  ); \
2668  lval = (__typeof__(lval)) _res; \
2669  } while (0)
2670 
/* Call a 4-arg word function (ppc64): args -> r3..r6; TOC save/swap/
   restore as in CALL_FN_W_v.
   NOTE(review): asm left byte-identical; comment added only. */
2671 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2672  do { \
2673  volatile OrigFn _orig = (orig); \
2674  volatile unsigned long _argvec[3+4]; \
2675  volatile unsigned long _res; \
2676  /* _argvec[0] holds current r2 across the call */ \
2677  _argvec[1] = (unsigned long)_orig.r2; \
2678  _argvec[2] = (unsigned long)_orig.nraddr; \
2679  _argvec[2+1] = (unsigned long)arg1; \
2680  _argvec[2+2] = (unsigned long)arg2; \
2681  _argvec[2+3] = (unsigned long)arg3; \
2682  _argvec[2+4] = (unsigned long)arg4; \
2683  __asm__ volatile( \
2684  VALGRIND_ALIGN_STACK \
2685  "mr 11,%1\n\t" \
2686  "std 2,-16(11)\n\t" /* save tocptr */ \
2687  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2688  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2689  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2690  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2691  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2692  "ld 11, 0(11)\n\t" /* target->r11 */ \
2693  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2694  "mr 11,%1\n\t" \
2695  "mr %0,3\n\t" \
2696  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2697  VALGRIND_RESTORE_STACK \
2698  : /*out*/ "=r" (_res) \
2699  : /*in*/ "r" (&_argvec[2]) \
2700  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2701  ); \
2702  lval = (__typeof__(lval)) _res; \
2703  } while (0)
2704 
/* ppc64-linux (ELFv1): 5-arg variant.  args 1-5 go in r3-r7; all
   five still fit in registers so no stack spill is needed. */
2705 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2706  do { \
2707  volatile OrigFn _orig = (orig); \
2708  volatile unsigned long _argvec[3+5]; \
2709  volatile unsigned long _res; \
2710  /* _argvec[0] holds current r2 across the call */ \
2711  _argvec[1] = (unsigned long)_orig.r2; \
2712  _argvec[2] = (unsigned long)_orig.nraddr; \
2713  _argvec[2+1] = (unsigned long)arg1; \
2714  _argvec[2+2] = (unsigned long)arg2; \
2715  _argvec[2+3] = (unsigned long)arg3; \
2716  _argvec[2+4] = (unsigned long)arg4; \
2717  _argvec[2+5] = (unsigned long)arg5; \
2718  __asm__ volatile( \
2719  VALGRIND_ALIGN_STACK \
2720  "mr 11,%1\n\t" \
2721  "std 2,-16(11)\n\t" /* save tocptr */ \
2722  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2723  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2724  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2725  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2726  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2727  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2728  "ld 11, 0(11)\n\t" /* target->r11 */ \
2729  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2730  "mr 11,%1\n\t" \
2731  "mr %0,3\n\t" \
2732  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2733  VALGRIND_RESTORE_STACK \
2734  : /*out*/ "=r" (_res) \
2735  : /*in*/ "r" (&_argvec[2]) \
2736  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2737  ); \
2738  lval = (__typeof__(lval)) _res; \
2739  } while (0)
2740 
/* ppc64-linux (ELFv1): 6-arg variant.  args 1-6 go in r3-r8. */
2741 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2742  do { \
2743  volatile OrigFn _orig = (orig); \
2744  volatile unsigned long _argvec[3+6]; \
2745  volatile unsigned long _res; \
2746  /* _argvec[0] holds current r2 across the call */ \
2747  _argvec[1] = (unsigned long)_orig.r2; \
2748  _argvec[2] = (unsigned long)_orig.nraddr; \
2749  _argvec[2+1] = (unsigned long)arg1; \
2750  _argvec[2+2] = (unsigned long)arg2; \
2751  _argvec[2+3] = (unsigned long)arg3; \
2752  _argvec[2+4] = (unsigned long)arg4; \
2753  _argvec[2+5] = (unsigned long)arg5; \
2754  _argvec[2+6] = (unsigned long)arg6; \
2755  __asm__ volatile( \
2756  VALGRIND_ALIGN_STACK \
2757  "mr 11,%1\n\t" \
2758  "std 2,-16(11)\n\t" /* save tocptr */ \
2759  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2760  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2761  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2762  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2763  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2764  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2765  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2766  "ld 11, 0(11)\n\t" /* target->r11 */ \
2767  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2768  "mr 11,%1\n\t" \
2769  "mr %0,3\n\t" \
2770  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2771  VALGRIND_RESTORE_STACK \
2772  : /*out*/ "=r" (_res) \
2773  : /*in*/ "r" (&_argvec[2]) \
2774  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2775  ); \
2776  lval = (__typeof__(lval)) _res; \
2777  } while (0)
2778 
/* ppc64-linux (ELFv1): 7-arg variant.  args 1-7 go in r3-r9. */
2779 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2780  arg7) \
2781  do { \
2782  volatile OrigFn _orig = (orig); \
2783  volatile unsigned long _argvec[3+7]; \
2784  volatile unsigned long _res; \
2785  /* _argvec[0] holds current r2 across the call */ \
2786  _argvec[1] = (unsigned long)_orig.r2; \
2787  _argvec[2] = (unsigned long)_orig.nraddr; \
2788  _argvec[2+1] = (unsigned long)arg1; \
2789  _argvec[2+2] = (unsigned long)arg2; \
2790  _argvec[2+3] = (unsigned long)arg3; \
2791  _argvec[2+4] = (unsigned long)arg4; \
2792  _argvec[2+5] = (unsigned long)arg5; \
2793  _argvec[2+6] = (unsigned long)arg6; \
2794  _argvec[2+7] = (unsigned long)arg7; \
2795  __asm__ volatile( \
2796  VALGRIND_ALIGN_STACK \
2797  "mr 11,%1\n\t" \
2798  "std 2,-16(11)\n\t" /* save tocptr */ \
2799  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2800  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2801  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2802  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2803  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2804  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2805  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2806  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2807  "ld 11, 0(11)\n\t" /* target->r11 */ \
2808  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2809  "mr 11,%1\n\t" \
2810  "mr %0,3\n\t" \
2811  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2812  VALGRIND_RESTORE_STACK \
2813  : /*out*/ "=r" (_res) \
2814  : /*in*/ "r" (&_argvec[2]) \
2815  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2816  ); \
2817  lval = (__typeof__(lval)) _res; \
2818  } while (0)
2819 
/* ppc64-linux (ELFv1): 8-arg variant.  args 1-8 fill the full
   parameter register set r3-r10; this is the last variant with no
   stack-passed arguments. */
2820 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2821  arg7,arg8) \
2822  do { \
2823  volatile OrigFn _orig = (orig); \
2824  volatile unsigned long _argvec[3+8]; \
2825  volatile unsigned long _res; \
2826  /* _argvec[0] holds current r2 across the call */ \
2827  _argvec[1] = (unsigned long)_orig.r2; \
2828  _argvec[2] = (unsigned long)_orig.nraddr; \
2829  _argvec[2+1] = (unsigned long)arg1; \
2830  _argvec[2+2] = (unsigned long)arg2; \
2831  _argvec[2+3] = (unsigned long)arg3; \
2832  _argvec[2+4] = (unsigned long)arg4; \
2833  _argvec[2+5] = (unsigned long)arg5; \
2834  _argvec[2+6] = (unsigned long)arg6; \
2835  _argvec[2+7] = (unsigned long)arg7; \
2836  _argvec[2+8] = (unsigned long)arg8; \
2837  __asm__ volatile( \
2838  VALGRIND_ALIGN_STACK \
2839  "mr 11,%1\n\t" \
2840  "std 2,-16(11)\n\t" /* save tocptr */ \
2841  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2842  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2843  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2844  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2845  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2846  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2847  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2848  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2849  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2850  "ld 11, 0(11)\n\t" /* target->r11 */ \
2851  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2852  "mr 11,%1\n\t" \
2853  "mr %0,3\n\t" \
2854  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2855  VALGRIND_RESTORE_STACK \
2856  : /*out*/ "=r" (_res) \
2857  : /*in*/ "r" (&_argvec[2]) \
2858  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2859  ); \
2860  lval = (__typeof__(lval)) _res; \
2861  } while (0)
2862 
/* ppc64-linux (ELFv1): 9-arg variant.  args 1-8 go in r3-r10; the
   frame is grown by 128 bytes and arg9 is stored to the callee's
   parameter-save area at 112(r1).  r3 is used as a scratch register
   for the spill before being loaded with arg1. */
2863 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2864  arg7,arg8,arg9) \
2865  do { \
2866  volatile OrigFn _orig = (orig); \
2867  volatile unsigned long _argvec[3+9]; \
2868  volatile unsigned long _res; \
2869  /* _argvec[0] holds current r2 across the call */ \
2870  _argvec[1] = (unsigned long)_orig.r2; \
2871  _argvec[2] = (unsigned long)_orig.nraddr; \
2872  _argvec[2+1] = (unsigned long)arg1; \
2873  _argvec[2+2] = (unsigned long)arg2; \
2874  _argvec[2+3] = (unsigned long)arg3; \
2875  _argvec[2+4] = (unsigned long)arg4; \
2876  _argvec[2+5] = (unsigned long)arg5; \
2877  _argvec[2+6] = (unsigned long)arg6; \
2878  _argvec[2+7] = (unsigned long)arg7; \
2879  _argvec[2+8] = (unsigned long)arg8; \
2880  _argvec[2+9] = (unsigned long)arg9; \
2881  __asm__ volatile( \
2882  VALGRIND_ALIGN_STACK \
2883  "mr 11,%1\n\t" \
2884  "std 2,-16(11)\n\t" /* save tocptr */ \
2885  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2886  "addi 1,1,-128\n\t" /* expand stack frame */ \
2887  /* arg9 */ \
2888  "ld 3,72(11)\n\t" \
2889  "std 3,112(1)\n\t" \
2890  /* args1-8 */ \
2891  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2892  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2893  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2894  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2895  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2896  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2897  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2898  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2899  "ld 11, 0(11)\n\t" /* target->r11 */ \
2900  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2901  "mr 11,%1\n\t" \
2902  "mr %0,3\n\t" \
2903  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2904  VALGRIND_RESTORE_STACK \
2905  : /*out*/ "=r" (_res) \
2906  : /*in*/ "r" (&_argvec[2]) \
2907  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2908  ); \
2909  lval = (__typeof__(lval)) _res; \
2910  } while (0)
2911 
/* ppc64-linux (ELFv1): 10-arg variant.  Like CALL_FN_W_9W, but also
   spills arg10 to 120(r1); stack-passed args are stored highest
   offset first. */
2912 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2913  arg7,arg8,arg9,arg10) \
2914  do { \
2915  volatile OrigFn _orig = (orig); \
2916  volatile unsigned long _argvec[3+10]; \
2917  volatile unsigned long _res; \
2918  /* _argvec[0] holds current r2 across the call */ \
2919  _argvec[1] = (unsigned long)_orig.r2; \
2920  _argvec[2] = (unsigned long)_orig.nraddr; \
2921  _argvec[2+1] = (unsigned long)arg1; \
2922  _argvec[2+2] = (unsigned long)arg2; \
2923  _argvec[2+3] = (unsigned long)arg3; \
2924  _argvec[2+4] = (unsigned long)arg4; \
2925  _argvec[2+5] = (unsigned long)arg5; \
2926  _argvec[2+6] = (unsigned long)arg6; \
2927  _argvec[2+7] = (unsigned long)arg7; \
2928  _argvec[2+8] = (unsigned long)arg8; \
2929  _argvec[2+9] = (unsigned long)arg9; \
2930  _argvec[2+10] = (unsigned long)arg10; \
2931  __asm__ volatile( \
2932  VALGRIND_ALIGN_STACK \
2933  "mr 11,%1\n\t" \
2934  "std 2,-16(11)\n\t" /* save tocptr */ \
2935  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2936  "addi 1,1,-128\n\t" /* expand stack frame */ \
2937  /* arg10 */ \
2938  "ld 3,80(11)\n\t" \
2939  "std 3,120(1)\n\t" \
2940  /* arg9 */ \
2941  "ld 3,72(11)\n\t" \
2942  "std 3,112(1)\n\t" \
2943  /* args1-8 */ \
2944  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2945  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2946  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2947  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2948  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2949  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2950  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2951  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2952  "ld 11, 0(11)\n\t" /* target->r11 */ \
2953  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2954  "mr 11,%1\n\t" \
2955  "mr %0,3\n\t" \
2956  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2957  VALGRIND_RESTORE_STACK \
2958  : /*out*/ "=r" (_res) \
2959  : /*in*/ "r" (&_argvec[2]) \
2960  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2961  ); \
2962  lval = (__typeof__(lval)) _res; \
2963  } while (0)
2964 
/* ppc64-linux (ELFv1): 11-arg variant.  Frame grows by 144 bytes
   (vs 128 for 9/10 args) to cover three stack-passed args at
   112/120/128(r1). */
2965 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2966  arg7,arg8,arg9,arg10,arg11) \
2967  do { \
2968  volatile OrigFn _orig = (orig); \
2969  volatile unsigned long _argvec[3+11]; \
2970  volatile unsigned long _res; \
2971  /* _argvec[0] holds current r2 across the call */ \
2972  _argvec[1] = (unsigned long)_orig.r2; \
2973  _argvec[2] = (unsigned long)_orig.nraddr; \
2974  _argvec[2+1] = (unsigned long)arg1; \
2975  _argvec[2+2] = (unsigned long)arg2; \
2976  _argvec[2+3] = (unsigned long)arg3; \
2977  _argvec[2+4] = (unsigned long)arg4; \
2978  _argvec[2+5] = (unsigned long)arg5; \
2979  _argvec[2+6] = (unsigned long)arg6; \
2980  _argvec[2+7] = (unsigned long)arg7; \
2981  _argvec[2+8] = (unsigned long)arg8; \
2982  _argvec[2+9] = (unsigned long)arg9; \
2983  _argvec[2+10] = (unsigned long)arg10; \
2984  _argvec[2+11] = (unsigned long)arg11; \
2985  __asm__ volatile( \
2986  VALGRIND_ALIGN_STACK \
2987  "mr 11,%1\n\t" \
2988  "std 2,-16(11)\n\t" /* save tocptr */ \
2989  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2990  "addi 1,1,-144\n\t" /* expand stack frame */ \
2991  /* arg11 */ \
2992  "ld 3,88(11)\n\t" \
2993  "std 3,128(1)\n\t" \
2994  /* arg10 */ \
2995  "ld 3,80(11)\n\t" \
2996  "std 3,120(1)\n\t" \
2997  /* arg9 */ \
2998  "ld 3,72(11)\n\t" \
2999  "std 3,112(1)\n\t" \
3000  /* args1-8 */ \
3001  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3002  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3003  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3004  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3005  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3006  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3007  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3008  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3009  "ld 11, 0(11)\n\t" /* target->r11 */ \
3010  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3011  "mr 11,%1\n\t" \
3012  "mr %0,3\n\t" \
3013  "ld 2,-16(11)\n\t" /* restore tocptr */ \
3014  VALGRIND_RESTORE_STACK \
3015  : /*out*/ "=r" (_res) \
3016  : /*in*/ "r" (&_argvec[2]) \
3017  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3018  ); \
3019  lval = (__typeof__(lval)) _res; \
3020  } while (0)
3021 
/* ppc64-linux (ELFv1): 12-arg variant.  Four stack-passed args at
   112..136(r1) inside a 144-byte frame extension. */
3022 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3023  arg7,arg8,arg9,arg10,arg11,arg12) \
3024  do { \
3025  volatile OrigFn _orig = (orig); \
3026  volatile unsigned long _argvec[3+12]; \
3027  volatile unsigned long _res; \
3028  /* _argvec[0] holds current r2 across the call */ \
3029  _argvec[1] = (unsigned long)_orig.r2; \
3030  _argvec[2] = (unsigned long)_orig.nraddr; \
3031  _argvec[2+1] = (unsigned long)arg1; \
3032  _argvec[2+2] = (unsigned long)arg2; \
3033  _argvec[2+3] = (unsigned long)arg3; \
3034  _argvec[2+4] = (unsigned long)arg4; \
3035  _argvec[2+5] = (unsigned long)arg5; \
3036  _argvec[2+6] = (unsigned long)arg6; \
3037  _argvec[2+7] = (unsigned long)arg7; \
3038  _argvec[2+8] = (unsigned long)arg8; \
3039  _argvec[2+9] = (unsigned long)arg9; \
3040  _argvec[2+10] = (unsigned long)arg10; \
3041  _argvec[2+11] = (unsigned long)arg11; \
3042  _argvec[2+12] = (unsigned long)arg12; \
3043  __asm__ volatile( \
3044  VALGRIND_ALIGN_STACK \
3045  "mr 11,%1\n\t" \
3046  "std 2,-16(11)\n\t" /* save tocptr */ \
3047  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3048  "addi 1,1,-144\n\t" /* expand stack frame */ \
3049  /* arg12 */ \
3050  "ld 3,96(11)\n\t" \
3051  "std 3,136(1)\n\t" \
3052  /* arg11 */ \
3053  "ld 3,88(11)\n\t" \
3054  "std 3,128(1)\n\t" \
3055  /* arg10 */ \
3056  "ld 3,80(11)\n\t" \
3057  "std 3,120(1)\n\t" \
3058  /* arg9 */ \
3059  "ld 3,72(11)\n\t" \
3060  "std 3,112(1)\n\t" \
3061  /* args1-8 */ \
3062  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3063  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3064  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3065  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3066  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3067  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3068  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3069  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3070  "ld 11, 0(11)\n\t" /* target->r11 */ \
3071  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3072  "mr 11,%1\n\t" \
3073  "mr %0,3\n\t" \
3074  "ld 2,-16(11)\n\t" /* restore tocptr */ \
3075  VALGRIND_RESTORE_STACK \
3076  : /*out*/ "=r" (_res) \
3077  : /*in*/ "r" (&_argvec[2]) \
3078  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3079  ); \
3080  lval = (__typeof__(lval)) _res; \
3081  } while (0)
3082 
3083 #endif /* PLAT_ppc64_linux */
3084 
3085 /* ------------------------- arm-linux ------------------------- */
3086 
3087 #if defined(PLAT_arm_linux)
3088 
3089 /* These regs are trashed by the hidden call. */
/* AAPCS caller-saved set used by the CALL_FN_ macros below: r0-r3
   (argument/scratch), r4 (scratch used for the call target) and
   r14 (lr, clobbered by the branch-and-link). */
3090 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14"
3091 
3092 /* Macros to save and align the stack before making a function
3093  call and restore it afterwards as gcc may not keep the stack
3094  pointer aligned if it doesn't realise calls are being made
3095  to other functions. */
3096 
3097 /* This is a bit tricky. We store the original stack pointer in r10
3098  as it is callee-saves. gcc doesn't allow the use of r11 for some
3099  reason. Also, we can't directly "bic" the stack pointer in thumb
3100  mode since r13 isn't an allowed register number in that context.
3101  So use r4 as a temporary, since that is about to get trashed
3102  anyway, just after each use of this macro. Side effect is we need
3103  to be very careful about any future changes, since
3104  VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* Save the original sp in callee-saved r10, then round sp down to an
   8-byte boundary (via r4, since sp can't be "bic"-ed directly in
   thumb mode -- see the comment above). */
3105 #define VALGRIND_ALIGN_STACK \
3106  "mov r10, sp\n\t" \
3107  "mov r4, sp\n\t" \
3108  "bic r4, r4, #7\n\t" \
3109  "mov sp, r4\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the original sp from r10. */
3110 #define VALGRIND_RESTORE_STACK \
3111  "mov sp, r10\n\t"
3112 
3113 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3114  long) == 4. */
3115 
/* arm-linux: call a 0-arg word function.  The target address is
   loaded from _argvec[0] into r4 and invoked via the no-redirect
   branch sequence; the result is taken from r0. */
3116 #define CALL_FN_W_v(lval, orig) \
3117  do { \
3118  volatile OrigFn _orig = (orig); \
3119  volatile unsigned long _argvec[1]; \
3120  volatile unsigned long _res; \
3121  _argvec[0] = (unsigned long)_orig.nraddr; \
3122  __asm__ volatile( \
3123  VALGRIND_ALIGN_STACK \
3124  "ldr r4, [%1] \n\t" /* target->r4 */ \
3125  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3126  VALGRIND_RESTORE_STACK \
3127  "mov %0, r0\n" \
3128  : /*out*/ "=r" (_res) \
3129  : /*in*/ "0" (&_argvec[0]) \
3130  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3131  ); \
3132  lval = (__typeof__(lval)) _res; \
3133  } while (0)
3134 
/* arm-linux: 1-arg variant.  arg1 is loaded into r0 from
   _argvec[1] (offset #4, since unsigned long is 4 bytes here). */
3135 #define CALL_FN_W_W(lval, orig, arg1) \
3136  do { \
3137  volatile OrigFn _orig = (orig); \
3138  volatile unsigned long _argvec[2]; \
3139  volatile unsigned long _res; \
3140  _argvec[0] = (unsigned long)_orig.nraddr; \
3141  _argvec[1] = (unsigned long)(arg1); \
3142  __asm__ volatile( \
3143  VALGRIND_ALIGN_STACK \
3144  "ldr r0, [%1, #4] \n\t" \
3145  "ldr r4, [%1] \n\t" /* target->r4 */ \
3146  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3147  VALGRIND_RESTORE_STACK \
3148  "mov %0, r0\n" \
3149  : /*out*/ "=r" (_res) \
3150  : /*in*/ "0" (&_argvec[0]) \
3151  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3152  ); \
3153  lval = (__typeof__(lval)) _res; \
3154  } while (0)
3155 
/* arm-linux: 2-arg variant.  args 1-2 go in r0-r1. */
3156 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3157  do { \
3158  volatile OrigFn _orig = (orig); \
3159  volatile unsigned long _argvec[3]; \
3160  volatile unsigned long _res; \
3161  _argvec[0] = (unsigned long)_orig.nraddr; \
3162  _argvec[1] = (unsigned long)(arg1); \
3163  _argvec[2] = (unsigned long)(arg2); \
3164  __asm__ volatile( \
3165  VALGRIND_ALIGN_STACK \
3166  "ldr r0, [%1, #4] \n\t" \
3167  "ldr r1, [%1, #8] \n\t" \
3168  "ldr r4, [%1] \n\t" /* target->r4 */ \
3169  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3170  VALGRIND_RESTORE_STACK \
3171  "mov %0, r0\n" \
3172  : /*out*/ "=r" (_res) \
3173  : /*in*/ "0" (&_argvec[0]) \
3174  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3175  ); \
3176  lval = (__typeof__(lval)) _res; \
3177  } while (0)
3178 
/* arm-linux: 3-arg variant.  args 1-3 go in r0-r2. */
3179 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3180  do { \
3181  volatile OrigFn _orig = (orig); \
3182  volatile unsigned long _argvec[4]; \
3183  volatile unsigned long _res; \
3184  _argvec[0] = (unsigned long)_orig.nraddr; \
3185  _argvec[1] = (unsigned long)(arg1); \
3186  _argvec[2] = (unsigned long)(arg2); \
3187  _argvec[3] = (unsigned long)(arg3); \
3188  __asm__ volatile( \
3189  VALGRIND_ALIGN_STACK \
3190  "ldr r0, [%1, #4] \n\t" \
3191  "ldr r1, [%1, #8] \n\t" \
3192  "ldr r2, [%1, #12] \n\t" \
3193  "ldr r4, [%1] \n\t" /* target->r4 */ \
3194  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3195  VALGRIND_RESTORE_STACK \
3196  "mov %0, r0\n" \
3197  : /*out*/ "=r" (_res) \
3198  : /*in*/ "0" (&_argvec[0]) \
3199  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3200  ); \
3201  lval = (__typeof__(lval)) _res; \
3202  } while (0)
3203 
/* arm-linux: 4-arg variant.  args 1-4 fill the full AAPCS register
   set r0-r3; the last variant needing no stack-passed arguments. */
3204 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3205  do { \
3206  volatile OrigFn _orig = (orig); \
3207  volatile unsigned long _argvec[5]; \
3208  volatile unsigned long _res; \
3209  _argvec[0] = (unsigned long)_orig.nraddr; \
3210  _argvec[1] = (unsigned long)(arg1); \
3211  _argvec[2] = (unsigned long)(arg2); \
3212  _argvec[3] = (unsigned long)(arg3); \
3213  _argvec[4] = (unsigned long)(arg4); \
3214  __asm__ volatile( \
3215  VALGRIND_ALIGN_STACK \
3216  "ldr r0, [%1, #4] \n\t" \
3217  "ldr r1, [%1, #8] \n\t" \
3218  "ldr r2, [%1, #12] \n\t" \
3219  "ldr r3, [%1, #16] \n\t" \
3220  "ldr r4, [%1] \n\t" /* target->r4 */ \
3221  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3222  VALGRIND_RESTORE_STACK \
3223  "mov %0, r0" \
3224  : /*out*/ "=r" (_res) \
3225  : /*in*/ "0" (&_argvec[0]) \
3226  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3227  ); \
3228  lval = (__typeof__(lval)) _res; \
3229  } while (0)
3230 
/* arm-linux: 5-arg variant.  args 1-4 go in r0-r3; arg5 is pushed on
   the stack.  The "sub sp, #4" pad plus the single push total 8
   bytes, keeping sp 8-byte aligned as set up by
   VALGRIND_ALIGN_STACK. */
3231 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3232  do { \
3233  volatile OrigFn _orig = (orig); \
3234  volatile unsigned long _argvec[6]; \
3235  volatile unsigned long _res; \
3236  _argvec[0] = (unsigned long)_orig.nraddr; \
3237  _argvec[1] = (unsigned long)(arg1); \
3238  _argvec[2] = (unsigned long)(arg2); \
3239  _argvec[3] = (unsigned long)(arg3); \
3240  _argvec[4] = (unsigned long)(arg4); \
3241  _argvec[5] = (unsigned long)(arg5); \
3242  __asm__ volatile( \
3243  VALGRIND_ALIGN_STACK \
3244  "sub sp, sp, #4 \n\t" \
3245  "ldr r0, [%1, #20] \n\t" \
3246  "push {r0} \n\t" \
3247  "ldr r0, [%1, #4] \n\t" \
3248  "ldr r1, [%1, #8] \n\t" \
3249  "ldr r2, [%1, #12] \n\t" \
3250  "ldr r3, [%1, #16] \n\t" \
3251  "ldr r4, [%1] \n\t" /* target->r4 */ \
3252  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3253  VALGRIND_RESTORE_STACK \
3254  "mov %0, r0" \
3255  : /*out*/ "=r" (_res) \
3256  : /*in*/ "0" (&_argvec[0]) \
3257  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3258  ); \
3259  lval = (__typeof__(lval)) _res; \
3260  } while (0)
3261 
/* arm-linux: 6-arg variant.  args 5-6 are pushed together (8 bytes,
   so no alignment pad is needed); args 1-4 go in r0-r3. */
3262 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3263  do { \
3264  volatile OrigFn _orig = (orig); \
3265  volatile unsigned long _argvec[7]; \
3266  volatile unsigned long _res; \
3267  _argvec[0] = (unsigned long)_orig.nraddr; \
3268  _argvec[1] = (unsigned long)(arg1); \
3269  _argvec[2] = (unsigned long)(arg2); \
3270  _argvec[3] = (unsigned long)(arg3); \
3271  _argvec[4] = (unsigned long)(arg4); \
3272  _argvec[5] = (unsigned long)(arg5); \
3273  _argvec[6] = (unsigned long)(arg6); \
3274  __asm__ volatile( \
3275  VALGRIND_ALIGN_STACK \
3276  "ldr r0, [%1, #20] \n\t" \
3277  "ldr r1, [%1, #24] \n\t" \
3278  "push {r0, r1} \n\t" \
3279  "ldr r0, [%1, #4] \n\t" \
3280  "ldr r1, [%1, #8] \n\t" \
3281  "ldr r2, [%1, #12] \n\t" \
3282  "ldr r3, [%1, #16] \n\t" \
3283  "ldr r4, [%1] \n\t" /* target->r4 */ \
3284  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3285  VALGRIND_RESTORE_STACK \
3286  "mov %0, r0" \
3287  : /*out*/ "=r" (_res) \
3288  : /*in*/ "0" (&_argvec[0]) \
3289  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3290  ); \
3291  lval = (__typeof__(lval)) _res; \
3292  } while (0)
3293 
/* arm-linux: 7-arg variant.  Three words pushed plus a 4-byte pad
   keeps the 8-byte stack alignment; args 1-4 go in r0-r3. */
3294 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3295  arg7) \
3296  do { \
3297  volatile OrigFn _orig = (orig); \
3298  volatile unsigned long _argvec[8]; \
3299  volatile unsigned long _res; \
3300  _argvec[0] = (unsigned long)_orig.nraddr; \
3301  _argvec[1] = (unsigned long)(arg1); \
3302  _argvec[2] = (unsigned long)(arg2); \
3303  _argvec[3] = (unsigned long)(arg3); \
3304  _argvec[4] = (unsigned long)(arg4); \
3305  _argvec[5] = (unsigned long)(arg5); \
3306  _argvec[6] = (unsigned long)(arg6); \
3307  _argvec[7] = (unsigned long)(arg7); \
3308  __asm__ volatile( \
3309  VALGRIND_ALIGN_STACK \
3310  "sub sp, sp, #4 \n\t" \
3311  "ldr r0, [%1, #20] \n\t" \
3312  "ldr r1, [%1, #24] \n\t" \
3313  "ldr r2, [%1, #28] \n\t" \
3314  "push {r0, r1, r2} \n\t" \
3315  "ldr r0, [%1, #4] \n\t" \
3316  "ldr r1, [%1, #8] \n\t" \
3317  "ldr r2, [%1, #12] \n\t" \
3318  "ldr r3, [%1, #16] \n\t" \
3319  "ldr r4, [%1] \n\t" /* target->r4 */ \
3320  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3321  VALGRIND_RESTORE_STACK \
3322  "mov %0, r0" \
3323  : /*out*/ "=r" (_res) \
3324  : /*in*/ "0" (&_argvec[0]) \
3325  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3326  ); \
3327  lval = (__typeof__(lval)) _res; \
3328  } while (0)
3329 
/* arm-linux: 8-arg variant.  Four words pushed (16 bytes, already
   8-byte aligned, so no pad); args 1-4 go in r0-r3. */
3330 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3331  arg7,arg8) \
3332  do { \
3333  volatile OrigFn _orig = (orig); \
3334  volatile unsigned long _argvec[9]; \
3335  volatile unsigned long _res; \
3336  _argvec[0] = (unsigned long)_orig.nraddr; \
3337  _argvec[1] = (unsigned long)(arg1); \
3338  _argvec[2] = (unsigned long)(arg2); \
3339  _argvec[3] = (unsigned long)(arg3); \
3340  _argvec[4] = (unsigned long)(arg4); \
3341  _argvec[5] = (unsigned long)(arg5); \
3342  _argvec[6] = (unsigned long)(arg6); \
3343  _argvec[7] = (unsigned long)(arg7); \
3344  _argvec[8] = (unsigned long)(arg8); \
3345  __asm__ volatile( \
3346  VALGRIND_ALIGN_STACK \
3347  "ldr r0, [%1, #20] \n\t" \
3348  "ldr r1, [%1, #24] \n\t" \
3349  "ldr r2, [%1, #28] \n\t" \
3350  "ldr r3, [%1, #32] \n\t" \
3351  "push {r0, r1, r2, r3} \n\t" \
3352  "ldr r0, [%1, #4] \n\t" \
3353  "ldr r1, [%1, #8] \n\t" \
3354  "ldr r2, [%1, #12] \n\t" \
3355  "ldr r3, [%1, #16] \n\t" \
3356  "ldr r4, [%1] \n\t" /* target->r4 */ \
3357  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3358  VALGRIND_RESTORE_STACK \
3359  "mov %0, r0" \
3360  : /*out*/ "=r" (_res) \
3361  : /*in*/ "0" (&_argvec[0]) \
3362  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3363  ); \
3364  lval = (__typeof__(lval)) _res; \
3365  } while (0)
3366 
/* arm-linux: 9-arg variant.  Five words pushed plus a 4-byte pad
   (24 bytes total) preserves 8-byte stack alignment; r4 doubles as
   a scratch for arg9 before receiving the target address. */
3367 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3368  arg7,arg8,arg9) \
3369  do { \
3370  volatile OrigFn _orig = (orig); \
3371  volatile unsigned long _argvec[10]; \
3372  volatile unsigned long _res; \
3373  _argvec[0] = (unsigned long)_orig.nraddr; \
3374  _argvec[1] = (unsigned long)(arg1); \
3375  _argvec[2] = (unsigned long)(arg2); \
3376  _argvec[3] = (unsigned long)(arg3); \
3377  _argvec[4] = (unsigned long)(arg4); \
3378  _argvec[5] = (unsigned long)(arg5); \
3379  _argvec[6] = (unsigned long)(arg6); \
3380  _argvec[7] = (unsigned long)(arg7); \
3381  _argvec[8] = (unsigned long)(arg8); \
3382  _argvec[9] = (unsigned long)(arg9); \
3383  __asm__ volatile( \
3384  VALGRIND_ALIGN_STACK \
3385  "sub sp, sp, #4 \n\t" \
3386  "ldr r0, [%1, #20] \n\t" \
3387  "ldr r1, [%1, #24] \n\t" \
3388  "ldr r2, [%1, #28] \n\t" \
3389  "ldr r3, [%1, #32] \n\t" \
3390  "ldr r4, [%1, #36] \n\t" \
3391  "push {r0, r1, r2, r3, r4} \n\t" \
3392  "ldr r0, [%1, #4] \n\t" \
3393  "ldr r1, [%1, #8] \n\t" \
3394  "ldr r2, [%1, #12] \n\t" \
3395  "ldr r3, [%1, #16] \n\t" \
3396  "ldr r4, [%1] \n\t" /* target->r4 */ \
3397  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3398  VALGRIND_RESTORE_STACK \
3399  "mov %0, r0" \
3400  : /*out*/ "=r" (_res) \
3401  : /*in*/ "0" (&_argvec[0]) \
3402  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3403  ); \
3404  lval = (__typeof__(lval)) _res; \
3405  } while (0)
3406 
/* arm-linux: 10-arg variant.  arg10 is pushed first so it ends up
   highest on the stack, then args 5-9; six words total (24 bytes)
   keep the 8-byte alignment without a pad. */
3407 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3408  arg7,arg8,arg9,arg10) \
3409  do { \
3410  volatile OrigFn _orig = (orig); \
3411  volatile unsigned long _argvec[11]; \
3412  volatile unsigned long _res; \
3413  _argvec[0] = (unsigned long)_orig.nraddr; \
3414  _argvec[1] = (unsigned long)(arg1); \
3415  _argvec[2] = (unsigned long)(arg2); \
3416  _argvec[3] = (unsigned long)(arg3); \
3417  _argvec[4] = (unsigned long)(arg4); \
3418  _argvec[5] = (unsigned long)(arg5); \
3419  _argvec[6] = (unsigned long)(arg6); \
3420  _argvec[7] = (unsigned long)(arg7); \
3421  _argvec[8] = (unsigned long)(arg8); \
3422  _argvec[9] = (unsigned long)(arg9); \
3423  _argvec[10] = (unsigned long)(arg10); \
3424  __asm__ volatile( \
3425  VALGRIND_ALIGN_STACK \
3426  "ldr r0, [%1, #40] \n\t" \
3427  "push {r0} \n\t" \
3428  "ldr r0, [%1, #20] \n\t" \
3429  "ldr r1, [%1, #24] \n\t" \
3430  "ldr r2, [%1, #28] \n\t" \
3431  "ldr r3, [%1, #32] \n\t" \
3432  "ldr r4, [%1, #36] \n\t" \
3433  "push {r0, r1, r2, r3, r4} \n\t" \
3434  "ldr r0, [%1, #4] \n\t" \
3435  "ldr r1, [%1, #8] \n\t" \
3436  "ldr r2, [%1, #12] \n\t" \
3437  "ldr r3, [%1, #16] \n\t" \
3438  "ldr r4, [%1] \n\t" /* target->r4 */ \
3439  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3440  VALGRIND_RESTORE_STACK \
3441  "mov %0, r0" \
3442  : /*out*/ "=r" (_res) \
3443  : /*in*/ "0" (&_argvec[0]) \
3444  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3445  ); \
3446  lval = (__typeof__(lval)) _res; \
3447  } while (0)
3448 
/* arm-linux: 11-arg variant.  args 10-11 then 5-9 are pushed (seven
   words plus a 4-byte pad = 32 bytes, alignment preserved); args
   1-4 go in r0-r3. */
3449 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3450  arg6,arg7,arg8,arg9,arg10, \
3451  arg11) \
3452  do { \
3453  volatile OrigFn _orig = (orig); \
3454  volatile unsigned long _argvec[12]; \
3455  volatile unsigned long _res; \
3456  _argvec[0] = (unsigned long)_orig.nraddr; \
3457  _argvec[1] = (unsigned long)(arg1); \
3458  _argvec[2] = (unsigned long)(arg2); \
3459  _argvec[3] = (unsigned long)(arg3); \
3460  _argvec[4] = (unsigned long)(arg4); \
3461  _argvec[5] = (unsigned long)(arg5); \
3462  _argvec[6] = (unsigned long)(arg6); \
3463  _argvec[7] = (unsigned long)(arg7); \
3464  _argvec[8] = (unsigned long)(arg8); \
3465  _argvec[9] = (unsigned long)(arg9); \
3466  _argvec[10] = (unsigned long)(arg10); \
3467  _argvec[11] = (unsigned long)(arg11); \
3468  __asm__ volatile( \
3469  VALGRIND_ALIGN_STACK \
3470  "sub sp, sp, #4 \n\t" \
3471  "ldr r0, [%1, #40] \n\t" \
3472  "ldr r1, [%1, #44] \n\t" \
3473  "push {r0, r1} \n\t" \
3474  "ldr r0, [%1, #20] \n\t" \
3475  "ldr r1, [%1, #24] \n\t" \
3476  "ldr r2, [%1, #28] \n\t" \
3477  "ldr r3, [%1, #32] \n\t" \
3478  "ldr r4, [%1, #36] \n\t" \
3479  "push {r0, r1, r2, r3, r4} \n\t" \
3480  "ldr r0, [%1, #4] \n\t" \
3481  "ldr r1, [%1, #8] \n\t" \
3482  "ldr r2, [%1, #12] \n\t" \
3483  "ldr r3, [%1, #16] \n\t" \
3484  "ldr r4, [%1] \n\t" /* target->r4 */ \
3485  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3486  VALGRIND_RESTORE_STACK \
3487  "mov %0, r0" \
3488  : /*out*/ "=r" (_res) \
3489  : /*in*/ "0" (&_argvec[0]) \
3490  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3491  ); \
3492  lval = (__typeof__(lval)) _res; \
3493  } while (0)
3494 
/* arm-linux: 12-arg variant.  args 10-12 then 5-9 are pushed (eight
   words = 32 bytes, alignment preserved without a pad); args 1-4 go
   in r0-r3. */
3495 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3496  arg6,arg7,arg8,arg9,arg10, \
3497  arg11,arg12) \
3498  do { \
3499  volatile OrigFn _orig = (orig); \
3500  volatile unsigned long _argvec[13]; \
3501  volatile unsigned long _res; \
3502  _argvec[0] = (unsigned long)_orig.nraddr; \
3503  _argvec[1] = (unsigned long)(arg1); \
3504  _argvec[2] = (unsigned long)(arg2); \
3505  _argvec[3] = (unsigned long)(arg3); \
3506  _argvec[4] = (unsigned long)(arg4); \
3507  _argvec[5] = (unsigned long)(arg5); \
3508  _argvec[6] = (unsigned long)(arg6); \
3509  _argvec[7] = (unsigned long)(arg7); \
3510  _argvec[8] = (unsigned long)(arg8); \
3511  _argvec[9] = (unsigned long)(arg9); \
3512  _argvec[10] = (unsigned long)(arg10); \
3513  _argvec[11] = (unsigned long)(arg11); \
3514  _argvec[12] = (unsigned long)(arg12); \
3515  __asm__ volatile( \
3516  VALGRIND_ALIGN_STACK \
3517  "ldr r0, [%1, #40] \n\t" \
3518  "ldr r1, [%1, #44] \n\t" \
3519  "ldr r2, [%1, #48] \n\t" \
3520  "push {r0, r1, r2} \n\t" \
3521  "ldr r0, [%1, #20] \n\t" \
3522  "ldr r1, [%1, #24] \n\t" \
3523  "ldr r2, [%1, #28] \n\t" \
3524  "ldr r3, [%1, #32] \n\t" \
3525  "ldr r4, [%1, #36] \n\t" \
3526  "push {r0, r1, r2, r3, r4} \n\t" \
3527  "ldr r0, [%1, #4] \n\t" \
3528  "ldr r1, [%1, #8] \n\t" \
3529  "ldr r2, [%1, #12] \n\t" \
3530  "ldr r3, [%1, #16] \n\t" \
3531  "ldr r4, [%1] \n\t" /* target->r4 */ \
3532  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3533  VALGRIND_RESTORE_STACK \
3534  "mov %0, r0" \
3535  : /*out*/ "=r" (_res) \
3536  : /*in*/ "0" (&_argvec[0]) \
3537  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3538  ); \
3539  lval = (__typeof__(lval)) _res; \
3540  } while (0)
3541 
3542 #endif /* PLAT_arm_linux */
3543 
3544 /* ------------------------ arm64-linux ------------------------ */
3545 
3546 #if defined(PLAT_arm64_linux)
3547 
3548 /* These regs are trashed by the hidden call. */
/* AArch64: integer regs x0-x20 and x30 (lr), plus the entire SIMD/FP
   set v0-v31, are declared clobbered by the no-redirect call. */
3549 #define __CALLER_SAVED_REGS \
3550  "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
3551  "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
3552  "x18", "x19", "x20", "x30", \
3553  "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
3554  "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
3555  "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
3556  "v26", "v27", "v28", "v29", "v30", "v31"
3557 
3558 /* x21 is callee-saved, so we can use it to save and restore SP around
3559  the hidden call. */
3560 #define VALGRIND_ALIGN_STACK \
3561  "mov x21, sp\n\t" \
3562  "bic sp, x21, #15\n\t"
3563 #define VALGRIND_RESTORE_STACK \
3564  "mov sp, x21\n\t"
3565 
3566 /* These CALL_FN_ macros assume that on arm64-linux,
3567  sizeof(unsigned long) == 8. */
3568 
3569 #define CALL_FN_W_v(lval, orig) \
3570  do { \
3571  volatile OrigFn _orig = (orig); \
3572  volatile unsigned long _argvec[1]; \
3573  volatile unsigned long _res; \
3574  _argvec[0] = (unsigned long)_orig.nraddr; \
3575  __asm__ volatile( \
3576  VALGRIND_ALIGN_STACK \
3577  "ldr x8, [%1] \n\t" /* target->x8 */ \
3578  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3579  VALGRIND_RESTORE_STACK \
3580  "mov %0, x0\n" \
3581  : /*out*/ "=r" (_res) \
3582  : /*in*/ "0" (&_argvec[0]) \
3583  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3584  ); \
3585  lval = (__typeof__(lval)) _res; \
3586  } while (0)
3587 
3588 #define CALL_FN_W_W(lval, orig, arg1) \
3589  do { \
3590  volatile OrigFn _orig = (orig); \
3591  volatile unsigned long _argvec[2]; \
3592  volatile unsigned long _res; \
3593  _argvec[0] = (unsigned long)_orig.nraddr; \
3594  _argvec[1] = (unsigned long)(arg1); \
3595  __asm__ volatile( \
3596  VALGRIND_ALIGN_STACK \
3597  "ldr x0, [%1, #8] \n\t" \
3598  "ldr x8, [%1] \n\t" /* target->x8 */ \
3599  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3600  VALGRIND_RESTORE_STACK \
3601  "mov %0, x0\n" \
3602  : /*out*/ "=r" (_res) \
3603  : /*in*/ "0" (&_argvec[0]) \
3604  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3605  ); \
3606  lval = (__typeof__(lval)) _res; \
3607  } while (0)
3608 
/* Two-argument call: arg1 -> x0, arg2 -> x1 (from _argvec[1..2]),
   target address -> x8, result <- x0.
   Fix: declare "x21" clobbered, as the sibling CALL_FN_W_v/CALL_FN_W_W
   macros already do.  VALGRIND_ALIGN_STACK saves the original SP in x21
   and VALGRIND_RESTORE_STACK reads it back; without the clobber the
   compiler may keep a live value in x21 across the asm and have it
   silently corrupted.  The previous commented-out r10 entry was a
   leftover copied from the 32-bit arm variant of this macro. */
3609 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3610  do { \
3611  volatile OrigFn _orig = (orig); \
3612  volatile unsigned long _argvec[3]; \
3613  volatile unsigned long _res; \
3614  _argvec[0] = (unsigned long)_orig.nraddr; \
3615  _argvec[1] = (unsigned long)(arg1); \
3616  _argvec[2] = (unsigned long)(arg2); \
3617  __asm__ volatile( \
3618  VALGRIND_ALIGN_STACK \
3619  "ldr x0, [%1, #8] \n\t" \
3620  "ldr x1, [%1, #16] \n\t" \
3621  "ldr x8, [%1] \n\t" /* target->x8 */ \
3622  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3623  VALGRIND_RESTORE_STACK \
3624  "mov %0, x0\n" \
3625  : /*out*/ "=r" (_res) \
3626  : /*in*/ "0" (&_argvec[0]) \
3627  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3628  ); \
3629  lval = (__typeof__(lval)) _res; \
3630  } while (0)
3631 
/* Three-argument call: arg1..arg3 -> x0..x2, target -> x8, result <- x0.
   Fix: declare "x21" clobbered (it is written by VALGRIND_ALIGN_STACK
   and read by VALGRIND_RESTORE_STACK); the commented-out r10 entry was
   a leftover from the 32-bit arm variant and listed no clobber at all. */
3632 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3633  do { \
3634  volatile OrigFn _orig = (orig); \
3635  volatile unsigned long _argvec[4]; \
3636  volatile unsigned long _res; \
3637  _argvec[0] = (unsigned long)_orig.nraddr; \
3638  _argvec[1] = (unsigned long)(arg1); \
3639  _argvec[2] = (unsigned long)(arg2); \
3640  _argvec[3] = (unsigned long)(arg3); \
3641  __asm__ volatile( \
3642  VALGRIND_ALIGN_STACK \
3643  "ldr x0, [%1, #8] \n\t" \
3644  "ldr x1, [%1, #16] \n\t" \
3645  "ldr x2, [%1, #24] \n\t" \
3646  "ldr x8, [%1] \n\t" /* target->x8 */ \
3647  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3648  VALGRIND_RESTORE_STACK \
3649  "mov %0, x0\n" \
3650  : /*out*/ "=r" (_res) \
3651  : /*in*/ "0" (&_argvec[0]) \
3652  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3653  ); \
3654  lval = (__typeof__(lval)) _res; \
3655  } while (0)
3656 
/* Four-argument call: arg1..arg4 -> x0..x3, target -> x8, result <- x0.
   Fix: declare "x21" clobbered (it is written by VALGRIND_ALIGN_STACK
   and read by VALGRIND_RESTORE_STACK); the commented-out r10 entry was
   a leftover from the 32-bit arm variant and listed no clobber at all. */
3657 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3658  do { \
3659  volatile OrigFn _orig = (orig); \
3660  volatile unsigned long _argvec[5]; \
3661  volatile unsigned long _res; \
3662  _argvec[0] = (unsigned long)_orig.nraddr; \
3663  _argvec[1] = (unsigned long)(arg1); \
3664  _argvec[2] = (unsigned long)(arg2); \
3665  _argvec[3] = (unsigned long)(arg3); \
3666  _argvec[4] = (unsigned long)(arg4); \
3667  __asm__ volatile( \
3668  VALGRIND_ALIGN_STACK \
3669  "ldr x0, [%1, #8] \n\t" \
3670  "ldr x1, [%1, #16] \n\t" \
3671  "ldr x2, [%1, #24] \n\t" \
3672  "ldr x3, [%1, #32] \n\t" \
3673  "ldr x8, [%1] \n\t" /* target->x8 */ \
3674  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3675  VALGRIND_RESTORE_STACK \
3676  "mov %0, x0" \
3677  : /*out*/ "=r" (_res) \
3678  : /*in*/ "0" (&_argvec[0]) \
3679  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3680  ); \
3681  lval = (__typeof__(lval)) _res; \
3682  } while (0)
3683 
3684 #endif /* PLAT_arm64_linux */
3685 
3686 /* ------------------------- s390x-linux ------------------------- */
3687 
3688 #if defined(PLAT_s390x_linux)
3689 
3690 /* Similar workaround as amd64 (see above), but we use r11 as frame
3691  pointer and save the old r11 in r7. r11 might be used for
3692  argvec, therefore we copy argvec in r1 since r1 is clobbered
3693  after the call anyway. */
3694 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
3695 # define __FRAME_POINTER \
3696  ,"d"(__builtin_dwarf_cfa())
3697 # define VALGRIND_CFI_PROLOGUE \
3698  ".cfi_remember_state\n\t" \
3699  "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
3700  "lgr 7,11\n\t" \
3701  "lgr 11,%2\n\t" \
3702  ".cfi_def_cfa r11, 0\n\t"
3703 # define VALGRIND_CFI_EPILOGUE \
3704  "lgr 11, 7\n\t" \
3705  ".cfi_restore_state\n\t"
3706 #else
3707 # define __FRAME_POINTER
3708 # define VALGRIND_CFI_PROLOGUE \
3709  "lgr 1,%1\n\t"
3710 # define VALGRIND_CFI_EPILOGUE
3711 #endif
3712 
3713 /* Nb: On s390 the stack pointer is properly aligned *at all times*
3714  according to the s390 GCC maintainer. (The ABI specification is not
3715  precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
3716  VALGRIND_RESTORE_STACK are not defined here. */
3717 
3718 /* These regs are trashed by the hidden call. Note that we overwrite
3719  r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
3720  function a proper return address. All others are ABI defined call
3721  clobbers. */
3722 #define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
3723  "f0","f1","f2","f3","f4","f5","f6","f7"
3724 
3725 /* Nb: Although r11 is modified in the asm snippets below (inside
3726  VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
3727  two reasons:
3728  (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
3729  modified
3730  (2) GCC will complain that r11 cannot appear inside a clobber section,
3731  when compiled with -O -fno-omit-frame-pointer
3732  */
3733 
/* Common shape of all s390x CALL_FN_ macros below: r1 holds a copy of
   the argvec pointer (set up in VALGRIND_CFI_PROLOGUE), args go in
   r2..r6, "aghi 15,-N" carves out the callee's stack area (160 bytes is
   the fixed register save area required below r15; the frame grows by 8
   bytes per stack-passed argument), the target address is loaded into
   r1 last (it overwrites the argvec copy), and the result comes back in
   r2 ("lgr %0, 2").  r7 is clobbered because the prologue saves r11
   there. */
3734 #define CALL_FN_W_v(lval, orig) \
3735  do { \
3736  volatile OrigFn _orig = (orig); \
3737  volatile unsigned long _argvec[1]; \
3738  volatile unsigned long _res; \
3739  _argvec[0] = (unsigned long)_orig.nraddr; \
3740  __asm__ volatile( \
3741  VALGRIND_CFI_PROLOGUE \
3742  "aghi 15,-160\n\t" \
3743  "lg 1, 0(1)\n\t" /* target->r1 */ \
3744  VALGRIND_CALL_NOREDIR_R1 \
3745  "lgr %0, 2\n\t" \
3746  "aghi 15,160\n\t" \
3747  VALGRIND_CFI_EPILOGUE \
3748  : /*out*/ "=d" (_res) \
3749  : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
3750  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3751  ); \
3752  lval = (__typeof__(lval)) _res; \
3753  } while (0)
3754 
3755 /* The call abi has the arguments in r2-r6 and stack */
/* NOTE(review): this and the following variants constrain the argvec
   pointer with "a" (address register) whereas CALL_FN_W_v above uses
   "d"; both appear workable since the prologue copies %1 into r1 —
   verify against upstream if the difference is intentional. */
3756 #define CALL_FN_W_W(lval, orig, arg1) \
3757  do { \
3758  volatile OrigFn _orig = (orig); \
3759  volatile unsigned long _argvec[2]; \
3760  volatile unsigned long _res; \
3761  _argvec[0] = (unsigned long)_orig.nraddr; \
3762  _argvec[1] = (unsigned long)arg1; \
3763  __asm__ volatile( \
3764  VALGRIND_CFI_PROLOGUE \
3765  "aghi 15,-160\n\t" \
3766  "lg 2, 8(1)\n\t" \
3767  "lg 1, 0(1)\n\t" \
3768  VALGRIND_CALL_NOREDIR_R1 \
3769  "lgr %0, 2\n\t" \
3770  "aghi 15,160\n\t" \
3771  VALGRIND_CFI_EPILOGUE \
3772  : /*out*/ "=d" (_res) \
3773  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3774  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3775  ); \
3776  lval = (__typeof__(lval)) _res; \
3777  } while (0)
3778 
3779 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
3780  do { \
3781  volatile OrigFn _orig = (orig); \
3782  volatile unsigned long _argvec[3]; \
3783  volatile unsigned long _res; \
3784  _argvec[0] = (unsigned long)_orig.nraddr; \
3785  _argvec[1] = (unsigned long)arg1; \
3786  _argvec[2] = (unsigned long)arg2; \
3787  __asm__ volatile( \
3788  VALGRIND_CFI_PROLOGUE \
3789  "aghi 15,-160\n\t" \
3790  "lg 2, 8(1)\n\t" \
3791  "lg 3,16(1)\n\t" \
3792  "lg 1, 0(1)\n\t" \
3793  VALGRIND_CALL_NOREDIR_R1 \
3794  "lgr %0, 2\n\t" \
3795  "aghi 15,160\n\t" \
3796  VALGRIND_CFI_EPILOGUE \
3797  : /*out*/ "=d" (_res) \
3798  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3799  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3800  ); \
3801  lval = (__typeof__(lval)) _res; \
3802  } while (0)
3803 
3804 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
3805  do { \
3806  volatile OrigFn _orig = (orig); \
3807  volatile unsigned long _argvec[4]; \
3808  volatile unsigned long _res; \
3809  _argvec[0] = (unsigned long)_orig.nraddr; \
3810  _argvec[1] = (unsigned long)arg1; \
3811  _argvec[2] = (unsigned long)arg2; \
3812  _argvec[3] = (unsigned long)arg3; \
3813  __asm__ volatile( \
3814  VALGRIND_CFI_PROLOGUE \
3815  "aghi 15,-160\n\t" \
3816  "lg 2, 8(1)\n\t" \
3817  "lg 3,16(1)\n\t" \
3818  "lg 4,24(1)\n\t" \
3819  "lg 1, 0(1)\n\t" \
3820  VALGRIND_CALL_NOREDIR_R1 \
3821  "lgr %0, 2\n\t" \
3822  "aghi 15,160\n\t" \
3823  VALGRIND_CFI_EPILOGUE \
3824  : /*out*/ "=d" (_res) \
3825  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3826  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3827  ); \
3828  lval = (__typeof__(lval)) _res; \
3829  } while (0)
3830 
3831 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
3832  do { \
3833  volatile OrigFn _orig = (orig); \
3834  volatile unsigned long _argvec[5]; \
3835  volatile unsigned long _res; \
3836  _argvec[0] = (unsigned long)_orig.nraddr; \
3837  _argvec[1] = (unsigned long)arg1; \
3838  _argvec[2] = (unsigned long)arg2; \
3839  _argvec[3] = (unsigned long)arg3; \
3840  _argvec[4] = (unsigned long)arg4; \
3841  __asm__ volatile( \
3842  VALGRIND_CFI_PROLOGUE \
3843  "aghi 15,-160\n\t" \
3844  "lg 2, 8(1)\n\t" \
3845  "lg 3,16(1)\n\t" \
3846  "lg 4,24(1)\n\t" \
3847  "lg 5,32(1)\n\t" \
3848  "lg 1, 0(1)\n\t" \
3849  VALGRIND_CALL_NOREDIR_R1 \
3850  "lgr %0, 2\n\t" \
3851  "aghi 15,160\n\t" \
3852  VALGRIND_CFI_EPILOGUE \
3853  : /*out*/ "=d" (_res) \
3854  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3855  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3856  ); \
3857  lval = (__typeof__(lval)) _res; \
3858  } while (0)
3859 
/* From five arguments on, r6 is also used for argument passing, so it is
   added to the clobber list ("6"). */
3860 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
3861  do { \
3862  volatile OrigFn _orig = (orig); \
3863  volatile unsigned long _argvec[6]; \
3864  volatile unsigned long _res; \
3865  _argvec[0] = (unsigned long)_orig.nraddr; \
3866  _argvec[1] = (unsigned long)arg1; \
3867  _argvec[2] = (unsigned long)arg2; \
3868  _argvec[3] = (unsigned long)arg3; \
3869  _argvec[4] = (unsigned long)arg4; \
3870  _argvec[5] = (unsigned long)arg5; \
3871  __asm__ volatile( \
3872  VALGRIND_CFI_PROLOGUE \
3873  "aghi 15,-160\n\t" \
3874  "lg 2, 8(1)\n\t" \
3875  "lg 3,16(1)\n\t" \
3876  "lg 4,24(1)\n\t" \
3877  "lg 5,32(1)\n\t" \
3878  "lg 6,40(1)\n\t" \
3879  "lg 1, 0(1)\n\t" \
3880  VALGRIND_CALL_NOREDIR_R1 \
3881  "lgr %0, 2\n\t" \
3882  "aghi 15,160\n\t" \
3883  VALGRIND_CFI_EPILOGUE \
3884  : /*out*/ "=d" (_res) \
3885  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3886  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3887  ); \
3888  lval = (__typeof__(lval)) _res; \
3889  } while (0)
3890 
/* From six arguments on, the extra arguments are copied onto the callee's
   stack with mvc at offsets 160, 168, ... and the frame (aghi) grows by 8
   bytes per stack-passed argument. */
3891 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3892  arg6) \
3893  do { \
3894  volatile OrigFn _orig = (orig); \
3895  volatile unsigned long _argvec[7]; \
3896  volatile unsigned long _res; \
3897  _argvec[0] = (unsigned long)_orig.nraddr; \
3898  _argvec[1] = (unsigned long)arg1; \
3899  _argvec[2] = (unsigned long)arg2; \
3900  _argvec[3] = (unsigned long)arg3; \
3901  _argvec[4] = (unsigned long)arg4; \
3902  _argvec[5] = (unsigned long)arg5; \
3903  _argvec[6] = (unsigned long)arg6; \
3904  __asm__ volatile( \
3905  VALGRIND_CFI_PROLOGUE \
3906  "aghi 15,-168\n\t" \
3907  "lg 2, 8(1)\n\t" \
3908  "lg 3,16(1)\n\t" \
3909  "lg 4,24(1)\n\t" \
3910  "lg 5,32(1)\n\t" \
3911  "lg 6,40(1)\n\t" \
3912  "mvc 160(8,15), 48(1)\n\t" \
3913  "lg 1, 0(1)\n\t" \
3914  VALGRIND_CALL_NOREDIR_R1 \
3915  "lgr %0, 2\n\t" \
3916  "aghi 15,168\n\t" \
3917  VALGRIND_CFI_EPILOGUE \
3918  : /*out*/ "=d" (_res) \
3919  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3920  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3921  ); \
3922  lval = (__typeof__(lval)) _res; \
3923  } while (0)
3924 
3925 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3926  arg6, arg7) \
3927  do { \
3928  volatile OrigFn _orig = (orig); \
3929  volatile unsigned long _argvec[8]; \
3930  volatile unsigned long _res; \
3931  _argvec[0] = (unsigned long)_orig.nraddr; \
3932  _argvec[1] = (unsigned long)arg1; \
3933  _argvec[2] = (unsigned long)arg2; \
3934  _argvec[3] = (unsigned long)arg3; \
3935  _argvec[4] = (unsigned long)arg4; \
3936  _argvec[5] = (unsigned long)arg5; \
3937  _argvec[6] = (unsigned long)arg6; \
3938  _argvec[7] = (unsigned long)arg7; \
3939  __asm__ volatile( \
3940  VALGRIND_CFI_PROLOGUE \
3941  "aghi 15,-176\n\t" \
3942  "lg 2, 8(1)\n\t" \
3943  "lg 3,16(1)\n\t" \
3944  "lg 4,24(1)\n\t" \
3945  "lg 5,32(1)\n\t" \
3946  "lg 6,40(1)\n\t" \
3947  "mvc 160(8,15), 48(1)\n\t" \
3948  "mvc 168(8,15), 56(1)\n\t" \
3949  "lg 1, 0(1)\n\t" \
3950  VALGRIND_CALL_NOREDIR_R1 \
3951  "lgr %0, 2\n\t" \
3952  "aghi 15,176\n\t" \
3953  VALGRIND_CFI_EPILOGUE \
3954  : /*out*/ "=d" (_res) \
3955  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3956  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3957  ); \
3958  lval = (__typeof__(lval)) _res; \
3959  } while (0)
3960 
3961 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3962  arg6, arg7 ,arg8) \
3963  do { \
3964  volatile OrigFn _orig = (orig); \
3965  volatile unsigned long _argvec[9]; \
3966  volatile unsigned long _res; \
3967  _argvec[0] = (unsigned long)_orig.nraddr; \
3968  _argvec[1] = (unsigned long)arg1; \
3969  _argvec[2] = (unsigned long)arg2; \
3970  _argvec[3] = (unsigned long)arg3; \
3971  _argvec[4] = (unsigned long)arg4; \
3972  _argvec[5] = (unsigned long)arg5; \
3973  _argvec[6] = (unsigned long)arg6; \
3974  _argvec[7] = (unsigned long)arg7; \
3975  _argvec[8] = (unsigned long)arg8; \
3976  __asm__ volatile( \
3977  VALGRIND_CFI_PROLOGUE \
3978  "aghi 15,-184\n\t" \
3979  "lg 2, 8(1)\n\t" \
3980  "lg 3,16(1)\n\t" \
3981  "lg 4,24(1)\n\t" \
3982  "lg 5,32(1)\n\t" \
3983  "lg 6,40(1)\n\t" \
3984  "mvc 160(8,15), 48(1)\n\t" \
3985  "mvc 168(8,15), 56(1)\n\t" \
3986  "mvc 176(8,15), 64(1)\n\t" \
3987  "lg 1, 0(1)\n\t" \
3988  VALGRIND_CALL_NOREDIR_R1 \
3989  "lgr %0, 2\n\t" \
3990  "aghi 15,184\n\t" \
3991  VALGRIND_CFI_EPILOGUE \
3992  : /*out*/ "=d" (_res) \
3993  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3994  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3995  ); \
3996  lval = (__typeof__(lval)) _res; \
3997  } while (0)
3998 
3999 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4000  arg6, arg7 ,arg8, arg9) \
4001  do { \
4002  volatile OrigFn _orig = (orig); \
4003  volatile unsigned long _argvec[10]; \
4004  volatile unsigned long _res; \
4005  _argvec[0] = (unsigned long)_orig.nraddr; \
4006  _argvec[1] = (unsigned long)arg1; \
4007  _argvec[2] = (unsigned long)arg2; \
4008  _argvec[3] = (unsigned long)arg3; \
4009  _argvec[4] = (unsigned long)arg4; \
4010  _argvec[5] = (unsigned long)arg5; \
4011  _argvec[6] = (unsigned long)arg6; \
4012  _argvec[7] = (unsigned long)arg7; \
4013  _argvec[8] = (unsigned long)arg8; \
4014  _argvec[9] = (unsigned long)arg9; \
4015  __asm__ volatile( \
4016  VALGRIND_CFI_PROLOGUE \
4017  "aghi 15,-192\n\t" \
4018  "lg 2, 8(1)\n\t" \
4019  "lg 3,16(1)\n\t" \
4020  "lg 4,24(1)\n\t" \
4021  "lg 5,32(1)\n\t" \
4022  "lg 6,40(1)\n\t" \
4023  "mvc 160(8,15), 48(1)\n\t" \
4024  "mvc 168(8,15), 56(1)\n\t" \
4025  "mvc 176(8,15), 64(1)\n\t" \
4026  "mvc 184(8,15), 72(1)\n\t" \
4027  "lg 1, 0(1)\n\t" \
4028  VALGRIND_CALL_NOREDIR_R1 \
4029  "lgr %0, 2\n\t" \
4030  "aghi 15,192\n\t" \
4031  VALGRIND_CFI_EPILOGUE \
4032  : /*out*/ "=d" (_res) \
4033  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4034  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4035  ); \
4036  lval = (__typeof__(lval)) _res; \
4037  } while (0)
4038 
4039 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4040  arg6, arg7 ,arg8, arg9, arg10) \
4041  do { \
4042  volatile OrigFn _orig = (orig); \
4043  volatile unsigned long _argvec[11]; \
4044  volatile unsigned long _res; \
4045  _argvec[0] = (unsigned long)_orig.nraddr; \
4046  _argvec[1] = (unsigned long)arg1; \
4047  _argvec[2] = (unsigned long)arg2; \
4048  _argvec[3] = (unsigned long)arg3; \
4049  _argvec[4] = (unsigned long)arg4; \
4050  _argvec[5] = (unsigned long)arg5; \
4051  _argvec[6] = (unsigned long)arg6; \
4052  _argvec[7] = (unsigned long)arg7; \
4053  _argvec[8] = (unsigned long)arg8; \
4054  _argvec[9] = (unsigned long)arg9; \
4055  _argvec[10] = (unsigned long)arg10; \
4056  __asm__ volatile( \
4057  VALGRIND_CFI_PROLOGUE \
4058  "aghi 15,-200\n\t" \
4059  "lg 2, 8(1)\n\t" \
4060  "lg 3,16(1)\n\t" \
4061  "lg 4,24(1)\n\t" \
4062  "lg 5,32(1)\n\t" \
4063  "lg 6,40(1)\n\t" \
4064  "mvc 160(8,15), 48(1)\n\t" \
4065  "mvc 168(8,15), 56(1)\n\t" \
4066  "mvc 176(8,15), 64(1)\n\t" \
4067  "mvc 184(8,15), 72(1)\n\t" \
4068  "mvc 192(8,15), 80(1)\n\t" \
4069  "lg 1, 0(1)\n\t" \
4070  VALGRIND_CALL_NOREDIR_R1 \
4071  "lgr %0, 2\n\t" \
4072  "aghi 15,200\n\t" \
4073  VALGRIND_CFI_EPILOGUE \
4074  : /*out*/ "=d" (_res) \
4075  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4076  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4077  ); \
4078  lval = (__typeof__(lval)) _res; \
4079  } while (0)
4080 
4081 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4082  arg6, arg7 ,arg8, arg9, arg10, arg11) \
4083  do { \
4084  volatile OrigFn _orig = (orig); \
4085  volatile unsigned long _argvec[12]; \
4086  volatile unsigned long _res; \
4087  _argvec[0] = (unsigned long)_orig.nraddr; \
4088  _argvec[1] = (unsigned long)arg1; \
4089  _argvec[2] = (unsigned long)arg2; \
4090  _argvec[3] = (unsigned long)arg3; \
4091  _argvec[4] = (unsigned long)arg4; \
4092  _argvec[5] = (unsigned long)arg5; \
4093  _argvec[6] = (unsigned long)arg6; \
4094  _argvec[7] = (unsigned long)arg7; \
4095  _argvec[8] = (unsigned long)arg8; \
4096  _argvec[9] = (unsigned long)arg9; \
4097  _argvec[10] = (unsigned long)arg10; \
4098  _argvec[11] = (unsigned long)arg11; \
4099  __asm__ volatile( \
4100  VALGRIND_CFI_PROLOGUE \
4101  "aghi 15,-208\n\t" \
4102  "lg 2, 8(1)\n\t" \
4103  "lg 3,16(1)\n\t" \
4104  "lg 4,24(1)\n\t" \
4105  "lg 5,32(1)\n\t" \
4106  "lg 6,40(1)\n\t" \
4107  "mvc 160(8,15), 48(1)\n\t" \
4108  "mvc 168(8,15), 56(1)\n\t" \
4109  "mvc 176(8,15), 64(1)\n\t" \
4110  "mvc 184(8,15), 72(1)\n\t" \
4111  "mvc 192(8,15), 80(1)\n\t" \
4112  "mvc 200(8,15), 88(1)\n\t" \
4113  "lg 1, 0(1)\n\t" \
4114  VALGRIND_CALL_NOREDIR_R1 \
4115  "lgr %0, 2\n\t" \
4116  "aghi 15,208\n\t" \
4117  VALGRIND_CFI_EPILOGUE \
4118  : /*out*/ "=d" (_res) \
4119  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4120  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4121  ); \
4122  lval = (__typeof__(lval)) _res; \
4123  } while (0)
4124 
4125 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4126  arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
4127  do { \
4128  volatile OrigFn _orig = (orig); \
4129  volatile unsigned long _argvec[13]; \
4130  volatile unsigned long _res; \
4131  _argvec[0] = (unsigned long)_orig.nraddr; \
4132  _argvec[1] = (unsigned long)arg1; \
4133  _argvec[2] = (unsigned long)arg2; \
4134  _argvec[3] = (unsigned long)arg3; \
4135  _argvec[4] = (unsigned long)arg4; \
4136  _argvec[5] = (unsigned long)arg5; \
4137  _argvec[6] = (unsigned long)arg6; \
4138  _argvec[7] = (unsigned long)arg7; \
4139  _argvec[8] = (unsigned long)arg8; \
4140  _argvec[9] = (unsigned long)arg9; \
4141  _argvec[10] = (unsigned long)arg10; \
4142  _argvec[11] = (unsigned long)arg11; \
4143  _argvec[12] = (unsigned long)arg12; \
4144  __asm__ volatile( \
4145  VALGRIND_CFI_PROLOGUE \
4146  "aghi 15,-216\n\t" \
4147  "lg 2, 8(1)\n\t" \
4148  "lg 3,16(1)\n\t" \
4149  "lg 4,24(1)\n\t" \
4150  "lg 5,32(1)\n\t" \
4151  "lg 6,40(1)\n\t" \
4152  "mvc 160(8,15), 48(1)\n\t" \
4153  "mvc 168(8,15), 56(1)\n\t" \
4154  "mvc 176(8,15), 64(1)\n\t" \
4155  "mvc 184(8,15), 72(1)\n\t" \
4156  "mvc 192(8,15), 80(1)\n\t" \
4157  "mvc 200(8,15), 88(1)\n\t" \
4158  "mvc 208(8,15), 96(1)\n\t" \
4159  "lg 1, 0(1)\n\t" \
4160  VALGRIND_CALL_NOREDIR_R1 \
4161  "lgr %0, 2\n\t" \
4162  "aghi 15,216\n\t" \
4163  VALGRIND_CFI_EPILOGUE \
4164  : /*out*/ "=d" (_res) \
4165  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4166  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4167  ); \
4168  lval = (__typeof__(lval)) _res; \
4169  } while (0)
4170 
4171 
4172 #endif /* PLAT_s390x_linux */
4173 
4174 /* ------------------------- mips32-linux ----------------------- */
4175 
4176 #if defined(PLAT_mips32_linux)
4177 
4178 /* These regs are trashed by the hidden call. */
4179 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
4180 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
4181 "$25", "$31"
4182 
4183 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
4184  long) == 4. */
4185 
/* Common shape of the mips32 CALL_FN_ macros below: gp ($28) and ra
   ($31) are saved on the stack, an argument area is reserved below the
   stack pointer ($29), register args go in $4..$7 (a0..a3), the target
   address is loaded into $25 (t9, per the MIPS PIC calling convention)
   and reached via VALGRIND_CALL_NOREDIR_T9, and the result is read from
   $2 (v0). */
4186 #define CALL_FN_W_v(lval, orig) \
4187  do { \
4188  volatile OrigFn _orig = (orig); \
4189  volatile unsigned long _argvec[1]; \
4190  volatile unsigned long _res; \
4191  _argvec[0] = (unsigned long)_orig.nraddr; \
4192  __asm__ volatile( \
4193  "subu $29, $29, 8 \n\t" \
4194  "sw $28, 0($29) \n\t" \
4195  "sw $31, 4($29) \n\t" \
4196  "subu $29, $29, 16 \n\t" \
4197  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4198  VALGRIND_CALL_NOREDIR_T9 \
4199  "addu $29, $29, 16\n\t" \
4200  "lw $28, 0($29) \n\t" \
4201  "lw $31, 4($29) \n\t" \
4202  "addu $29, $29, 8 \n\t" \
4203  "move %0, $2\n" \
4204  : /*out*/ "=r" (_res) \
4205  : /*in*/ "0" (&_argvec[0]) \
4206  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4207  ); \
4208  lval = (__typeof__(lval)) _res; \
4209  } while (0)
4210 
4211 #define CALL_FN_W_W(lval, orig, arg1) \
4212  do { \
4213  volatile OrigFn _orig = (orig); \
4214  volatile unsigned long _argvec[2]; \
4215  volatile unsigned long _res; \
4216  _argvec[0] = (unsigned long)_orig.nraddr; \
4217  _argvec[1] = (unsigned long)(arg1); \
4218  __asm__ volatile( \
4219  "subu $29, $29, 8 \n\t" \
4220  "sw $28, 0($29) \n\t" \
4221  "sw $31, 4($29) \n\t" \
4222  "subu $29, $29, 16 \n\t" \
4223  "lw $4, 4(%1) \n\t" /* arg1*/ \
4224  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4225  VALGRIND_CALL_NOREDIR_T9 \
4226  "addu $29, $29, 16 \n\t" \
4227  "lw $28, 0($29) \n\t" \
4228  "lw $31, 4($29) \n\t" \
4229  "addu $29, $29, 8 \n\t" \
4230  "move %0, $2\n" \
4231  : /*out*/ "=r" (_res) \
4232  : /*in*/ "0" (&_argvec[0]) \
4233  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4234  ); \
4235  lval = (__typeof__(lval)) _res; \
4236  } while (0)
4237 
4238 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4239  do { \
4240  volatile OrigFn _orig = (orig); \
4241  volatile unsigned long _argvec[3]; \
4242  volatile unsigned long _res; \
4243  _argvec[0] = (unsigned long)_orig.nraddr; \
4244  _argvec[1] = (unsigned long)(arg1); \
4245  _argvec[2] = (unsigned long)(arg2); \
4246  __asm__ volatile( \
4247  "subu $29, $29, 8 \n\t" \
4248  "sw $28, 0($29) \n\t" \
4249  "sw $31, 4($29) \n\t" \
4250  "subu $29, $29, 16 \n\t" \
4251  "lw $4, 4(%1) \n\t" \
4252  "lw $5, 8(%1) \n\t" \
4253  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4254  VALGRIND_CALL_NOREDIR_T9 \
4255  "addu $29, $29, 16 \n\t" \
4256  "lw $28, 0($29) \n\t" \
4257  "lw $31, 4($29) \n\t" \
4258  "addu $29, $29, 8 \n\t" \
4259  "move %0, $2\n" \
4260  : /*out*/ "=r" (_res) \
4261  : /*in*/ "0" (&_argvec[0]) \
4262  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4263  ); \
4264  lval = (__typeof__(lval)) _res; \
4265  } while (0)
4266 
4267 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4268  do { \
4269  volatile OrigFn _orig = (orig); \
4270  volatile unsigned long _argvec[4]; \
4271  volatile unsigned long _res; \
4272  _argvec[0] = (unsigned long)_orig.nraddr; \
4273  _argvec[1] = (unsigned long)(arg1); \
4274  _argvec[2] = (unsigned long)(arg2); \
4275  _argvec[3] = (unsigned long)(arg3); \
4276  __asm__ volatile( \
4277  "subu $29, $29, 8 \n\t" \
4278  "sw $28, 0($29) \n\t" \
4279  "sw $31, 4($29) \n\t" \
4280  "subu $29, $29, 16 \n\t" \
4281  "lw $4, 4(%1) \n\t" \
4282  "lw $5, 8(%1) \n\t" \
4283  "lw $6, 12(%1) \n\t" \
4284  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4285  VALGRIND_CALL_NOREDIR_T9 \
4286  "addu $29, $29, 16 \n\t" \
4287  "lw $28, 0($29) \n\t" \
4288  "lw $31, 4($29) \n\t" \
4289  "addu $29, $29, 8 \n\t" \
4290  "move %0, $2\n" \
4291  : /*out*/ "=r" (_res) \
4292  : /*in*/ "0" (&_argvec[0]) \
4293  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4294  ); \
4295  lval = (__typeof__(lval)) _res; \
4296  } while (0)
4297 
4298 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4299  do { \
4300  volatile OrigFn _orig = (orig); \
4301  volatile unsigned long _argvec[5]; \
4302  volatile unsigned long _res; \
4303  _argvec[0] = (unsigned long)_orig.nraddr; \
4304  _argvec[1] = (unsigned long)(arg1); \
4305  _argvec[2] = (unsigned long)(arg2); \
4306  _argvec[3] = (unsigned long)(arg3); \
4307  _argvec[4] = (unsigned long)(arg4); \
4308  __asm__ volatile( \
4309  "subu $29, $29, 8 \n\t" \
4310  "sw $28, 0($29) \n\t" \
4311  "sw $31, 4($29) \n\t" \
4312  "subu $29, $29, 16 \n\t" \
4313  "lw $4, 4(%1) \n\t" \
4314  "lw $5, 8(%1) \n\t" \
4315  "lw $6, 12(%1) \n\t" \
4316  "lw $7, 16(%1) \n\t" \
4317  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4318  VALGRIND_CALL_NOREDIR_T9 \
4319  "addu $29, $29, 16 \n\t" \
4320  "lw $28, 0($29) \n\t" \
4321  "lw $31, 4($29) \n\t" \
4322  "addu $29, $29, 8 \n\t" \
4323  "move %0, $2\n" \
4324  : /*out*/ "=r" (_res) \
4325  : /*in*/ "0" (&_argvec[0]) \
4326  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4327  ); \
4328  lval = (__typeof__(lval)) _res; \
4329  } while (0)
4330 
/* From five arguments on, the extra args are staged through $4 and
   stored into the enlarged on-stack argument area (o32 keeps the first
   16 bytes as a register-save home area, so the 5th arg lands at
   16($29), the 6th at 20($29), and so on). */
4331 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4332  do { \
4333  volatile OrigFn _orig = (orig); \
4334  volatile unsigned long _argvec[6]; \
4335  volatile unsigned long _res; \
4336  _argvec[0] = (unsigned long)_orig.nraddr; \
4337  _argvec[1] = (unsigned long)(arg1); \
4338  _argvec[2] = (unsigned long)(arg2); \
4339  _argvec[3] = (unsigned long)(arg3); \
4340  _argvec[4] = (unsigned long)(arg4); \
4341  _argvec[5] = (unsigned long)(arg5); \
4342  __asm__ volatile( \
4343  "subu $29, $29, 8 \n\t" \
4344  "sw $28, 0($29) \n\t" \
4345  "sw $31, 4($29) \n\t" \
4346  "lw $4, 20(%1) \n\t" \
4347  "subu $29, $29, 24\n\t" \
4348  "sw $4, 16($29) \n\t" \
4349  "lw $4, 4(%1) \n\t" \
4350  "lw $5, 8(%1) \n\t" \
4351  "lw $6, 12(%1) \n\t" \
4352  "lw $7, 16(%1) \n\t" \
4353  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4354  VALGRIND_CALL_NOREDIR_T9 \
4355  "addu $29, $29, 24 \n\t" \
4356  "lw $28, 0($29) \n\t" \
4357  "lw $31, 4($29) \n\t" \
4358  "addu $29, $29, 8 \n\t" \
4359  "move %0, $2\n" \
4360  : /*out*/ "=r" (_res) \
4361  : /*in*/ "0" (&_argvec[0]) \
4362  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4363  ); \
4364  lval = (__typeof__(lval)) _res; \
4365  } while (0)
4366 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4367  do { \
4368  volatile OrigFn _orig = (orig); \
4369  volatile unsigned long _argvec[7]; \
4370  volatile unsigned long _res; \
4371  _argvec[0] = (unsigned long)_orig.nraddr; \
4372  _argvec[1] = (unsigned long)(arg1); \
4373  _argvec[2] = (unsigned long)(arg2); \
4374  _argvec[3] = (unsigned long)(arg3); \
4375  _argvec[4] = (unsigned long)(arg4); \
4376  _argvec[5] = (unsigned long)(arg5); \
4377  _argvec[6] = (unsigned long)(arg6); \
4378  __asm__ volatile( \
4379  "subu $29, $29, 8 \n\t" \
4380  "sw $28, 0($29) \n\t" \
4381  "sw $31, 4($29) \n\t" \
4382  "lw $4, 20(%1) \n\t" \
4383  "subu $29, $29, 32\n\t" \
4384  "sw $4, 16($29) \n\t" \
4385  "lw $4, 24(%1) \n\t" \
4386  "nop\n\t" \
4387  "sw $4, 20($29) \n\t" \
4388  "lw $4, 4(%1) \n\t" \
4389  "lw $5, 8(%1) \n\t" \
4390  "lw $6, 12(%1) \n\t" \
4391  "lw $7, 16(%1) \n\t" \
4392  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4393  VALGRIND_CALL_NOREDIR_T9 \
4394  "addu $29, $29, 32 \n\t" \
4395  "lw $28, 0($29) \n\t" \
4396  "lw $31, 4($29) \n\t" \
4397  "addu $29, $29, 8 \n\t" \
4398  "move %0, $2\n" \
4399  : /*out*/ "=r" (_res) \
4400  : /*in*/ "0" (&_argvec[0]) \
4401  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4402  ); \
4403  lval = (__typeof__(lval)) _res; \
4404  } while (0)
4405 
4406 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4407  arg7) \
4408  do { \
4409  volatile OrigFn _orig = (orig); \
4410  volatile unsigned long _argvec[8]; \
4411  volatile unsigned long _res; \
4412  _argvec[0] = (unsigned long)_orig.nraddr; \
4413  _argvec[1] = (unsigned long)(arg1); \
4414  _argvec[2] = (unsigned long)(arg2); \
4415  _argvec[3] = (unsigned long)(arg3); \
4416  _argvec[4] = (unsigned long)(arg4); \
4417  _argvec[5] = (unsigned long)(arg5); \
4418  _argvec[6] = (unsigned long)(arg6); \
4419  _argvec[7] = (unsigned long)(arg7); \
4420  __asm__ volatile( \
4421  "subu $29, $29, 8 \n\t" \
4422  "sw $28, 0($29) \n\t" \
4423  "sw $31, 4($29) \n\t" \
4424  "lw $4, 20(%1) \n\t" \
4425  "subu $29, $29, 32\n\t" \
4426  "sw $4, 16($29) \n\t" \
4427  "lw $4, 24(%1) \n\t" \
4428  "sw $4, 20($29) \n\t" \
4429  "lw $4, 28(%1) \n\t" \
4430  "sw $4, 24($29) \n\t" \
4431  "lw $4, 4(%1) \n\t" \
4432  "lw $5, 8(%1) \n\t" \
4433  "lw $6, 12(%1) \n\t" \
4434  "lw $7, 16(%1) \n\t" \
4435  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4436  VALGRIND_CALL_NOREDIR_T9 \
4437  "addu $29, $29, 32 \n\t" \
4438  "lw $28, 0($29) \n\t" \
4439  "lw $31, 4($29) \n\t" \
4440  "addu $29, $29, 8 \n\t" \
4441  "move %0, $2\n" \
4442  : /*out*/ "=r" (_res) \
4443  : /*in*/ "0" (&_argvec[0]) \
4444  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4445  ); \
4446  lval = (__typeof__(lval)) _res; \
4447  } while (0)
4448 
4449 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4450  arg7,arg8) \
4451  do { \
4452  volatile OrigFn _orig = (orig); \
4453  volatile unsigned long _argvec[9]; \
4454  volatile unsigned long _res; \
4455  _argvec[0] = (unsigned long)_orig.nraddr; \
4456  _argvec[1] = (unsigned long)(arg1); \
4457  _argvec[2] = (unsigned long)(arg2); \
4458  _argvec[3] = (unsigned long)(arg3); \
4459  _argvec[4] = (unsigned long)(arg4); \
4460  _argvec[5] = (unsigned long)(arg5); \
4461  _argvec[6] = (unsigned long)(arg6); \
4462  _argvec[7] = (unsigned long)(arg7); \
4463  _argvec[8] = (unsigned long)(arg8); \
4464  __asm__ volatile( \
4465  "subu $29, $29, 8 \n\t" \
4466  "sw $28, 0($29) \n\t" \
4467  "sw $31, 4($29) \n\t" \
4468  "lw $4, 20(%1) \n\t" \
4469  "subu $29, $29, 40\n\t" \
4470  "sw $4, 16($29) \n\t" \
4471  "lw $4, 24(%1) \n\t" \
4472  "sw $4, 20($29) \n\t" \
4473  "lw $4, 28(%1) \n\t" \
4474  "sw $4, 24($29) \n\t" \
4475  "lw $4, 32(%1) \n\t" \
4476  "sw $4, 28($29) \n\t" \
4477  "lw $4, 4(%1) \n\t" \
4478  "lw $5, 8(%1) \n\t" \
4479  "lw $6, 12(%1) \n\t" \
4480  "lw $7, 16(%1) \n\t" \
4481  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4482  VALGRIND_CALL_NOREDIR_T9 \
4483  "addu $29, $29, 40 \n\t" \
4484  "lw $28, 0($29) \n\t" \
4485  "lw $31, 4($29) \n\t" \
4486  "addu $29, $29, 8 \n\t" \
4487  "move %0, $2\n" \
4488  : /*out*/ "=r" (_res) \
4489  : /*in*/ "0" (&_argvec[0]) \
4490  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4491  ); \
4492  lval = (__typeof__(lval)) _res; \
4493  } while (0)
4494 
4495 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4496  arg7,arg8,arg9) \
4497  do { \
4498  volatile OrigFn _orig = (orig); \
4499  volatile unsigned long _argvec[10]; \
4500  volatile unsigned long _res; \
4501  _argvec[0] = (unsigned long)_orig.nraddr; \
4502  _argvec[1] = (unsigned long)(arg1); \
4503  _argvec[2] = (unsigned long)(arg2); \
4504  _argvec[3] = (unsigned long)(arg3); \
4505  _argvec[4] = (unsigned long)(arg4); \
4506  _argvec[5] = (unsigned long)(arg5); \
4507  _argvec[6] = (unsigned long)(arg6); \
4508  _argvec[7] = (unsigned long)(arg7); \
4509  _argvec[8] = (unsigned long)(arg8); \
4510  _argvec[9] = (unsigned long)(arg9); \
4511  __asm__ volatile( \
4512  "subu $29, $29, 8 \n\t" \
4513  "sw $28, 0($29) \n\t" \
4514  "sw $31, 4($29) \n\t" \
4515  "lw $4, 20(%1) \n\t" \
4516  "subu $29, $29, 40\n\t" \
4517  "sw $4, 16($29) \n\t" \
4518  "lw $4, 24(%1) \n\t" \
4519  "sw $4, 20($29) \n\t" \
4520  "lw $4, 28(%1) \n\t" \
4521  "sw $4, 24($29) \n\t" \
4522  "lw $4, 32(%1) \n\t" \
4523  "sw $4, 28($29) \n\t" \
4524  "lw $4, 36(%1) \n\t" \
4525  "sw $4, 32($29) \n\t" \
4526  "lw $4, 4(%1) \n\t" \
4527  "lw $5, 8(%1) \n\t" \
4528  "lw $6, 12(%1) \n\t" \
4529  "lw $7, 16(%1) \n\t" \
4530  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4531  VALGRIND_CALL_NOREDIR_T9 \
4532  "addu $29, $29, 40 \n\t" \
4533  "lw $28, 0($29) \n\t" \
4534  "lw $31, 4($29) \n\t" \
4535  "addu $29, $29, 8 \n\t" \
4536  "move %0, $2\n" \
4537  : /*out*/ "=r" (_res) \
4538  : /*in*/ "0" (&_argvec[0]) \
4539  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4540  ); \
4541  lval = (__typeof__(lval)) _res; \
4542  } while (0)
4543 
/* As CALL_FN_W_9W but for 10 word-sized args (mips32): the stack
   area grows to 48 bytes to hold args 5-10 (offsets 16..36). */
4544 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4545  arg7,arg8,arg9,arg10) \
4546  do { \
4547  volatile OrigFn _orig = (orig); \
4548  volatile unsigned long _argvec[11]; \
4549  volatile unsigned long _res; \
4550  _argvec[0] = (unsigned long)_orig.nraddr; \
4551  _argvec[1] = (unsigned long)(arg1); \
4552  _argvec[2] = (unsigned long)(arg2); \
4553  _argvec[3] = (unsigned long)(arg3); \
4554  _argvec[4] = (unsigned long)(arg4); \
4555  _argvec[5] = (unsigned long)(arg5); \
4556  _argvec[6] = (unsigned long)(arg6); \
4557  _argvec[7] = (unsigned long)(arg7); \
4558  _argvec[8] = (unsigned long)(arg8); \
4559  _argvec[9] = (unsigned long)(arg9); \
4560  _argvec[10] = (unsigned long)(arg10); \
4561  __asm__ volatile( \
4562  "subu $29, $29, 8 \n\t" \
4563  "sw $28, 0($29) \n\t" \
4564  "sw $31, 4($29) \n\t" \
4565  "lw $4, 20(%1) \n\t" \
4566  "subu $29, $29, 48\n\t" \
4567  "sw $4, 16($29) \n\t" \
4568  "lw $4, 24(%1) \n\t" \
4569  "sw $4, 20($29) \n\t" \
4570  "lw $4, 28(%1) \n\t" \
4571  "sw $4, 24($29) \n\t" \
4572  "lw $4, 32(%1) \n\t" \
4573  "sw $4, 28($29) \n\t" \
4574  "lw $4, 36(%1) \n\t" \
4575  "sw $4, 32($29) \n\t" \
4576  "lw $4, 40(%1) \n\t" \
4577  "sw $4, 36($29) \n\t" \
4578  "lw $4, 4(%1) \n\t" \
4579  "lw $5, 8(%1) \n\t" \
4580  "lw $6, 12(%1) \n\t" \
4581  "lw $7, 16(%1) \n\t" \
4582  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4583  VALGRIND_CALL_NOREDIR_T9 \
4584  "addu $29, $29, 48 \n\t" \
4585  "lw $28, 0($29) \n\t" \
4586  "lw $31, 4($29) \n\t" \
4587  "addu $29, $29, 8 \n\t" \
4588  "move %0, $2\n" \
4589  : /*out*/ "=r" (_res) \
4590  : /*in*/ "0" (&_argvec[0]) \
4591  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4592  ); \
4593  lval = (__typeof__(lval)) _res; \
4594  } while (0)
4595 
/* As CALL_FN_W_10W but for 11 word-sized args (mips32): args 5-11 are
   spilled into the 48-byte stack area (offsets 16..40). */
4596 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4597  arg6,arg7,arg8,arg9,arg10, \
4598  arg11) \
4599  do { \
4600  volatile OrigFn _orig = (orig); \
4601  volatile unsigned long _argvec[12]; \
4602  volatile unsigned long _res; \
4603  _argvec[0] = (unsigned long)_orig.nraddr; \
4604  _argvec[1] = (unsigned long)(arg1); \
4605  _argvec[2] = (unsigned long)(arg2); \
4606  _argvec[3] = (unsigned long)(arg3); \
4607  _argvec[4] = (unsigned long)(arg4); \
4608  _argvec[5] = (unsigned long)(arg5); \
4609  _argvec[6] = (unsigned long)(arg6); \
4610  _argvec[7] = (unsigned long)(arg7); \
4611  _argvec[8] = (unsigned long)(arg8); \
4612  _argvec[9] = (unsigned long)(arg9); \
4613  _argvec[10] = (unsigned long)(arg10); \
4614  _argvec[11] = (unsigned long)(arg11); \
4615  __asm__ volatile( \
4616  "subu $29, $29, 8 \n\t" \
4617  "sw $28, 0($29) \n\t" \
4618  "sw $31, 4($29) \n\t" \
4619  "lw $4, 20(%1) \n\t" \
4620  "subu $29, $29, 48\n\t" \
4621  "sw $4, 16($29) \n\t" \
4622  "lw $4, 24(%1) \n\t" \
4623  "sw $4, 20($29) \n\t" \
4624  "lw $4, 28(%1) \n\t" \
4625  "sw $4, 24($29) \n\t" \
4626  "lw $4, 32(%1) \n\t" \
4627  "sw $4, 28($29) \n\t" \
4628  "lw $4, 36(%1) \n\t" \
4629  "sw $4, 32($29) \n\t" \
4630  "lw $4, 40(%1) \n\t" \
4631  "sw $4, 36($29) \n\t" \
4632  "lw $4, 44(%1) \n\t" \
4633  "sw $4, 40($29) \n\t" \
4634  "lw $4, 4(%1) \n\t" \
4635  "lw $5, 8(%1) \n\t" \
4636  "lw $6, 12(%1) \n\t" \
4637  "lw $7, 16(%1) \n\t" \
4638  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4639  VALGRIND_CALL_NOREDIR_T9 \
4640  "addu $29, $29, 48 \n\t" \
4641  "lw $28, 0($29) \n\t" \
4642  "lw $31, 4($29) \n\t" \
4643  "addu $29, $29, 8 \n\t" \
4644  "move %0, $2\n" \
4645  : /*out*/ "=r" (_res) \
4646  : /*in*/ "0" (&_argvec[0]) \
4647  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4648  ); \
4649  lval = (__typeof__(lval)) _res; \
4650  } while (0)
4651 
/* As CALL_FN_W_11W but for 12 word-sized args (mips32): the stack
   area grows to 56 bytes to hold args 5-12 (offsets 16..44).
   NOTE(review): this variant passes the argvec pointer with the "r"
   constraint, whereas the sibling mips32 wrappers above use "0"
   (tying it to the output operand).  Both appear functionally
   equivalent here since %1 is fully consumed before %0 is written --
   confirm the inconsistency is intentional. */
4652 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4653  arg6,arg7,arg8,arg9,arg10, \
4654  arg11,arg12) \
4655  do { \
4656  volatile OrigFn _orig = (orig); \
4657  volatile unsigned long _argvec[13]; \
4658  volatile unsigned long _res; \
4659  _argvec[0] = (unsigned long)_orig.nraddr; \
4660  _argvec[1] = (unsigned long)(arg1); \
4661  _argvec[2] = (unsigned long)(arg2); \
4662  _argvec[3] = (unsigned long)(arg3); \
4663  _argvec[4] = (unsigned long)(arg4); \
4664  _argvec[5] = (unsigned long)(arg5); \
4665  _argvec[6] = (unsigned long)(arg6); \
4666  _argvec[7] = (unsigned long)(arg7); \
4667  _argvec[8] = (unsigned long)(arg8); \
4668  _argvec[9] = (unsigned long)(arg9); \
4669  _argvec[10] = (unsigned long)(arg10); \
4670  _argvec[11] = (unsigned long)(arg11); \
4671  _argvec[12] = (unsigned long)(arg12); \
4672  __asm__ volatile( \
4673  "subu $29, $29, 8 \n\t" \
4674  "sw $28, 0($29) \n\t" \
4675  "sw $31, 4($29) \n\t" \
4676  "lw $4, 20(%1) \n\t" \
4677  "subu $29, $29, 56\n\t" \
4678  "sw $4, 16($29) \n\t" \
4679  "lw $4, 24(%1) \n\t" \
4680  "sw $4, 20($29) \n\t" \
4681  "lw $4, 28(%1) \n\t" \
4682  "sw $4, 24($29) \n\t" \
4683  "lw $4, 32(%1) \n\t" \
4684  "sw $4, 28($29) \n\t" \
4685  "lw $4, 36(%1) \n\t" \
4686  "sw $4, 32($29) \n\t" \
4687  "lw $4, 40(%1) \n\t" \
4688  "sw $4, 36($29) \n\t" \
4689  "lw $4, 44(%1) \n\t" \
4690  "sw $4, 40($29) \n\t" \
4691  "lw $4, 48(%1) \n\t" \
4692  "sw $4, 44($29) \n\t" \
4693  "lw $4, 4(%1) \n\t" \
4694  "lw $5, 8(%1) \n\t" \
4695  "lw $6, 12(%1) \n\t" \
4696  "lw $7, 16(%1) \n\t" \
4697  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4698  VALGRIND_CALL_NOREDIR_T9 \
4699  "addu $29, $29, 56 \n\t" \
4700  "lw $28, 0($29) \n\t" \
4701  "lw $31, 4($29) \n\t" \
4702  "addu $29, $29, 8 \n\t" \
4703  "move %0, $2\n" \
4704  : /*out*/ "=r" (_res) \
4705  : /*in*/ "r" (&_argvec[0]) \
4706  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4707  ); \
4708  lval = (__typeof__(lval)) _res; \
4709  } while (0)
4710 
4711 #endif /* PLAT_mips32_linux */
4712 
4713 /* ------------------------- mips64-linux ------------------------- */
4714 
4715 #if defined(PLAT_mips64_linux)
4716 
4717 /* These regs are trashed by the hidden call. */
/* Clobber list for the mips64 CALL_FN_ wrappers below: $2/$3 (return
   values), the argument/temporary registers $4-$15 and $24, $25 (t9,
   used as the call target -- see "target->t9" in the asm) and $31
   (the return address). */
4718 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
4719 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
4720 "$25", "$31"
4721 
4722 /* These CALL_FN_ macros assume that on mips64-linux, sizeof(unsigned
4723  long) == 8. */
4724 
/* Call a word-returning function taking no arguments (mips64): load
   the target into $25 (t9), make the no-redirect call, and take the
   result from $2.  The argvec pointer is passed with the "r"
   constraint for consistency with every other mips64 CALL_FN_
   wrapper below (the previous "0" matching constraint tied it to the
   output register, which was functionally equivalent but
   inconsistent). */
4725 #define CALL_FN_W_v(lval, orig) \
4726  do { \
4727  volatile OrigFn _orig = (orig); \
4728  volatile unsigned long _argvec[1]; \
4729  volatile unsigned long _res; \
4730  _argvec[0] = (unsigned long)_orig.nraddr; \
4731  __asm__ volatile( \
4732  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4733  VALGRIND_CALL_NOREDIR_T9 \
4734  "move %0, $2\n" \
4735  : /*out*/ "=r" (_res) \
4736  : /*in*/ "r" (&_argvec[0]) \
4737  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4738  ); \
4739  lval = (__typeof__(lval)) _res; \
4740  } while (0)
4741 
/* Call a word-returning function taking 1 word-sized arg (mips64):
   arg1 goes in $4, result in $2; argvec slots are 8 bytes each. */
4742 #define CALL_FN_W_W(lval, orig, arg1) \
4743  do { \
4744  volatile OrigFn _orig = (orig); \
4745  volatile unsigned long _argvec[2]; \
4746  volatile unsigned long _res; \
4747  _argvec[0] = (unsigned long)_orig.nraddr; \
4748  _argvec[1] = (unsigned long)(arg1); \
4749  __asm__ volatile( \
4750  "ld $4, 8(%1)\n\t" /* arg1*/ \
4751  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4752  VALGRIND_CALL_NOREDIR_T9 \
4753  "move %0, $2\n" \
4754  : /*out*/ "=r" (_res) \
4755  : /*in*/ "r" (&_argvec[0]) \
4756  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4757  ); \
4758  lval = (__typeof__(lval)) _res; \
4759  } while (0)
4760 
/* As CALL_FN_W_W but for 2 word-sized args (mips64): $4, $5. */
4761 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4762  do { \
4763  volatile OrigFn _orig = (orig); \
4764  volatile unsigned long _argvec[3]; \
4765  volatile unsigned long _res; \
4766  _argvec[0] = (unsigned long)_orig.nraddr; \
4767  _argvec[1] = (unsigned long)(arg1); \
4768  _argvec[2] = (unsigned long)(arg2); \
4769  __asm__ volatile( \
4770  "ld $4, 8(%1)\n\t" \
4771  "ld $5, 16(%1)\n\t" \
4772  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4773  VALGRIND_CALL_NOREDIR_T9 \
4774  "move %0, $2\n" \
4775  : /*out*/ "=r" (_res) \
4776  : /*in*/ "r" (&_argvec[0]) \
4777  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4778  ); \
4779  lval = (__typeof__(lval)) _res; \
4780  } while (0)
4781 
/* As CALL_FN_W_WW but for 3 word-sized args (mips64): $4-$6. */
4782 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4783  do { \
4784  volatile OrigFn _orig = (orig); \
4785  volatile unsigned long _argvec[4]; \
4786  volatile unsigned long _res; \
4787  _argvec[0] = (unsigned long)_orig.nraddr; \
4788  _argvec[1] = (unsigned long)(arg1); \
4789  _argvec[2] = (unsigned long)(arg2); \
4790  _argvec[3] = (unsigned long)(arg3); \
4791  __asm__ volatile( \
4792  "ld $4, 8(%1)\n\t" \
4793  "ld $5, 16(%1)\n\t" \
4794  "ld $6, 24(%1)\n\t" \
4795  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4796  VALGRIND_CALL_NOREDIR_T9 \
4797  "move %0, $2\n" \
4798  : /*out*/ "=r" (_res) \
4799  : /*in*/ "r" (&_argvec[0]) \
4800  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4801  ); \
4802  lval = (__typeof__(lval)) _res; \
4803  } while (0)
4804 
/* As CALL_FN_W_WWW but for 4 word-sized args (mips64): $4-$7. */
4805 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4806  do { \
4807  volatile OrigFn _orig = (orig); \
4808  volatile unsigned long _argvec[5]; \
4809  volatile unsigned long _res; \
4810  _argvec[0] = (unsigned long)_orig.nraddr; \
4811  _argvec[1] = (unsigned long)(arg1); \
4812  _argvec[2] = (unsigned long)(arg2); \
4813  _argvec[3] = (unsigned long)(arg3); \
4814  _argvec[4] = (unsigned long)(arg4); \
4815  __asm__ volatile( \
4816  "ld $4, 8(%1)\n\t" \
4817  "ld $5, 16(%1)\n\t" \
4818  "ld $6, 24(%1)\n\t" \
4819  "ld $7, 32(%1)\n\t" \
4820  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4821  VALGRIND_CALL_NOREDIR_T9 \
4822  "move %0, $2\n" \
4823  : /*out*/ "=r" (_res) \
4824  : /*in*/ "r" (&_argvec[0]) \
4825  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4826  ); \
4827  lval = (__typeof__(lval)) _res; \
4828  } while (0)
4829 
/* As CALL_FN_W_WWWW but for 5 word-sized args (mips64): $4-$8. */
4830 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4831  do { \
4832  volatile OrigFn _orig = (orig); \
4833  volatile unsigned long _argvec[6]; \
4834  volatile unsigned long _res; \
4835  _argvec[0] = (unsigned long)_orig.nraddr; \
4836  _argvec[1] = (unsigned long)(arg1); \
4837  _argvec[2] = (unsigned long)(arg2); \
4838  _argvec[3] = (unsigned long)(arg3); \
4839  _argvec[4] = (unsigned long)(arg4); \
4840  _argvec[5] = (unsigned long)(arg5); \
4841  __asm__ volatile( \
4842  "ld $4, 8(%1)\n\t" \
4843  "ld $5, 16(%1)\n\t" \
4844  "ld $6, 24(%1)\n\t" \
4845  "ld $7, 32(%1)\n\t" \
4846  "ld $8, 40(%1)\n\t" \
4847  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4848  VALGRIND_CALL_NOREDIR_T9 \
4849  "move %0, $2\n" \
4850  : /*out*/ "=r" (_res) \
4851  : /*in*/ "r" (&_argvec[0]) \
4852  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4853  ); \
4854  lval = (__typeof__(lval)) _res; \
4855  } while (0)
4856 
/* As CALL_FN_W_5W but for 6 word-sized args (mips64): $4-$9. */
4857 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4858  do { \
4859  volatile OrigFn _orig = (orig); \
4860  volatile unsigned long _argvec[7]; \
4861  volatile unsigned long _res; \
4862  _argvec[0] = (unsigned long)_orig.nraddr; \
4863  _argvec[1] = (unsigned long)(arg1); \
4864  _argvec[2] = (unsigned long)(arg2); \
4865  _argvec[3] = (unsigned long)(arg3); \
4866  _argvec[4] = (unsigned long)(arg4); \
4867  _argvec[5] = (unsigned long)(arg5); \
4868  _argvec[6] = (unsigned long)(arg6); \
4869  __asm__ volatile( \
4870  "ld $4, 8(%1)\n\t" \
4871  "ld $5, 16(%1)\n\t" \
4872  "ld $6, 24(%1)\n\t" \
4873  "ld $7, 32(%1)\n\t" \
4874  "ld $8, 40(%1)\n\t" \
4875  "ld $9, 48(%1)\n\t" \
4876  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4877  VALGRIND_CALL_NOREDIR_T9 \
4878  "move %0, $2\n" \
4879  : /*out*/ "=r" (_res) \
4880  : /*in*/ "r" (&_argvec[0]) \
4881  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4882  ); \
4883  lval = (__typeof__(lval)) _res; \
4884  } while (0)
4885 
/* As CALL_FN_W_6W but for 7 word-sized args (mips64): $4-$10. */
4886 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4887  arg7) \
4888  do { \
4889  volatile OrigFn _orig = (orig); \
4890  volatile unsigned long _argvec[8]; \
4891  volatile unsigned long _res; \
4892  _argvec[0] = (unsigned long)_orig.nraddr; \
4893  _argvec[1] = (unsigned long)(arg1); \
4894  _argvec[2] = (unsigned long)(arg2); \
4895  _argvec[3] = (unsigned long)(arg3); \
4896  _argvec[4] = (unsigned long)(arg4); \
4897  _argvec[5] = (unsigned long)(arg5); \
4898  _argvec[6] = (unsigned long)(arg6); \
4899  _argvec[7] = (unsigned long)(arg7); \
4900  __asm__ volatile( \
4901  "ld $4, 8(%1)\n\t" \
4902  "ld $5, 16(%1)\n\t" \
4903  "ld $6, 24(%1)\n\t" \
4904  "ld $7, 32(%1)\n\t" \
4905  "ld $8, 40(%1)\n\t" \
4906  "ld $9, 48(%1)\n\t" \
4907  "ld $10, 56(%1)\n\t" \
4908  "ld $25, 0(%1) \n\t" /* target->t9 */ \
4909  VALGRIND_CALL_NOREDIR_T9 \
4910  "move %0, $2\n" \
4911  : /*out*/ "=r" (_res) \
4912  : /*in*/ "r" (&_argvec[0]) \
4913  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4914  ); \
4915  lval = (__typeof__(lval)) _res; \
4916  } while (0)
4917 
/* As CALL_FN_W_7W but for 8 word-sized args (mips64): $4-$11, the
   full register-argument complement of the n64 ABI. */
4918 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4919  arg7,arg8) \
4920  do { \
4921  volatile OrigFn _orig = (orig); \
4922  volatile unsigned long _argvec[9]; \
4923  volatile unsigned long _res; \
4924  _argvec[0] = (unsigned long)_orig.nraddr; \
4925  _argvec[1] = (unsigned long)(arg1); \
4926  _argvec[2] = (unsigned long)(arg2); \
4927  _argvec[3] = (unsigned long)(arg3); \
4928  _argvec[4] = (unsigned long)(arg4); \
4929  _argvec[5] = (unsigned long)(arg5); \
4930  _argvec[6] = (unsigned long)(arg6); \
4931  _argvec[7] = (unsigned long)(arg7); \
4932  _argvec[8] = (unsigned long)(arg8); \
4933  __asm__ volatile( \
4934  "ld $4, 8(%1)\n\t" \
4935  "ld $5, 16(%1)\n\t" \
4936  "ld $6, 24(%1)\n\t" \
4937  "ld $7, 32(%1)\n\t" \
4938  "ld $8, 40(%1)\n\t" \
4939  "ld $9, 48(%1)\n\t" \
4940  "ld $10, 56(%1)\n\t" \
4941  "ld $11, 64(%1)\n\t" \
4942  "ld $25, 0(%1) \n\t" /* target->t9 */ \
4943  VALGRIND_CALL_NOREDIR_T9 \
4944  "move %0, $2\n" \
4945  : /*out*/ "=r" (_res) \
4946  : /*in*/ "r" (&_argvec[0]) \
4947  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4948  ); \
4949  lval = (__typeof__(lval)) _res; \
4950  } while (0)
4951 
/* As CALL_FN_W_8W but for 9 word-sized args (mips64): args 1-8 go in
   $4-$11, arg9 is stored into an 8-byte stack slot carved out with
   dsubu/daddu around the call. */
4952 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4953  arg7,arg8,arg9) \
4954  do { \
4955  volatile OrigFn _orig = (orig); \
4956  volatile unsigned long _argvec[10]; \
4957  volatile unsigned long _res; \
4958  _argvec[0] = (unsigned long)_orig.nraddr; \
4959  _argvec[1] = (unsigned long)(arg1); \
4960  _argvec[2] = (unsigned long)(arg2); \
4961  _argvec[3] = (unsigned long)(arg3); \
4962  _argvec[4] = (unsigned long)(arg4); \
4963  _argvec[5] = (unsigned long)(arg5); \
4964  _argvec[6] = (unsigned long)(arg6); \
4965  _argvec[7] = (unsigned long)(arg7); \
4966  _argvec[8] = (unsigned long)(arg8); \
4967  _argvec[9] = (unsigned long)(arg9); \
4968  __asm__ volatile( \
4969  "dsubu $29, $29, 8\n\t" \
4970  "ld $4, 72(%1)\n\t" \
4971  "sd $4, 0($29)\n\t" \
4972  "ld $4, 8(%1)\n\t" \
4973  "ld $5, 16(%1)\n\t" \
4974  "ld $6, 24(%1)\n\t" \
4975  "ld $7, 32(%1)\n\t" \
4976  "ld $8, 40(%1)\n\t" \
4977  "ld $9, 48(%1)\n\t" \
4978  "ld $10, 56(%1)\n\t" \
4979  "ld $11, 64(%1)\n\t" \
4980  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4981  VALGRIND_CALL_NOREDIR_T9 \
4982  "daddu $29, $29, 8\n\t" \
4983  "move %0, $2\n" \
4984  : /*out*/ "=r" (_res) \
4985  : /*in*/ "r" (&_argvec[0]) \
4986  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4987  ); \
4988  lval = (__typeof__(lval)) _res; \
4989  } while (0)
4990 
/* As CALL_FN_W_9W but for 10 word-sized args (mips64): args 9-10 go
   into a 16-byte stack area. */
4991 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4992  arg7,arg8,arg9,arg10) \
4993  do { \
4994  volatile OrigFn _orig = (orig); \
4995  volatile unsigned long _argvec[11]; \
4996  volatile unsigned long _res; \
4997  _argvec[0] = (unsigned long)_orig.nraddr; \
4998  _argvec[1] = (unsigned long)(arg1); \
4999  _argvec[2] = (unsigned long)(arg2); \
5000  _argvec[3] = (unsigned long)(arg3); \
5001  _argvec[4] = (unsigned long)(arg4); \
5002  _argvec[5] = (unsigned long)(arg5); \
5003  _argvec[6] = (unsigned long)(arg6); \
5004  _argvec[7] = (unsigned long)(arg7); \
5005  _argvec[8] = (unsigned long)(arg8); \
5006  _argvec[9] = (unsigned long)(arg9); \
5007  _argvec[10] = (unsigned long)(arg10); \
5008  __asm__ volatile( \
5009  "dsubu $29, $29, 16\n\t" \
5010  "ld $4, 72(%1)\n\t" \
5011  "sd $4, 0($29)\n\t" \
5012  "ld $4, 80(%1)\n\t" \
5013  "sd $4, 8($29)\n\t" \
5014  "ld $4, 8(%1)\n\t" \
5015  "ld $5, 16(%1)\n\t" \
5016  "ld $6, 24(%1)\n\t" \
5017  "ld $7, 32(%1)\n\t" \
5018  "ld $8, 40(%1)\n\t" \
5019  "ld $9, 48(%1)\n\t" \
5020  "ld $10, 56(%1)\n\t" \
5021  "ld $11, 64(%1)\n\t" \
5022  "ld $25, 0(%1)\n\t" /* target->t9 */ \
5023  VALGRIND_CALL_NOREDIR_T9 \
5024  "daddu $29, $29, 16\n\t" \
5025  "move %0, $2\n" \
5026  : /*out*/ "=r" (_res) \
5027  : /*in*/ "r" (&_argvec[0]) \
5028  : /*trash*/ "memory", __CALLER_SAVED_REGS \
5029  ); \
5030  lval = (__typeof__(lval)) _res; \
5031  } while (0)
5032 
/* As CALL_FN_W_10W but for 11 word-sized args (mips64): args 9-11 go
   into a 24-byte stack area. */
5033 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5034  arg6,arg7,arg8,arg9,arg10, \
5035  arg11) \
5036  do { \
5037  volatile OrigFn _orig = (orig); \
5038  volatile unsigned long _argvec[12]; \
5039  volatile unsigned long _res; \
5040  _argvec[0] = (unsigned long)_orig.nraddr; \
5041  _argvec[1] = (unsigned long)(arg1); \
5042  _argvec[2] = (unsigned long)(arg2); \
5043  _argvec[3] = (unsigned long)(arg3); \
5044  _argvec[4] = (unsigned long)(arg4); \
5045  _argvec[5] = (unsigned long)(arg5); \
5046  _argvec[6] = (unsigned long)(arg6); \
5047  _argvec[7] = (unsigned long)(arg7); \
5048  _argvec[8] = (unsigned long)(arg8); \
5049  _argvec[9] = (unsigned long)(arg9); \
5050  _argvec[10] = (unsigned long)(arg10); \
5051  _argvec[11] = (unsigned long)(arg11); \
5052  __asm__ volatile( \
5053  "dsubu $29, $29, 24\n\t" \
5054  "ld $4, 72(%1)\n\t" \
5055  "sd $4, 0($29)\n\t" \
5056  "ld $4, 80(%1)\n\t" \
5057  "sd $4, 8($29)\n\t" \
5058  "ld $4, 88(%1)\n\t" \
5059  "sd $4, 16($29)\n\t" \
5060  "ld $4, 8(%1)\n\t" \
5061  "ld $5, 16(%1)\n\t" \
5062  "ld $6, 24(%1)\n\t" \
5063  "ld $7, 32(%1)\n\t" \
5064  "ld $8, 40(%1)\n\t" \
5065  "ld $9, 48(%1)\n\t" \
5066  "ld $10, 56(%1)\n\t" \
5067  "ld $11, 64(%1)\n\t" \
5068  "ld $25, 0(%1)\n\t" /* target->t9 */ \
5069  VALGRIND_CALL_NOREDIR_T9 \
5070  "daddu $29, $29, 24\n\t" \
5071  "move %0, $2\n" \
5072  : /*out*/ "=r" (_res) \
5073  : /*in*/ "r" (&_argvec[0]) \
5074  : /*trash*/ "memory", __CALLER_SAVED_REGS \
5075  ); \
5076  lval = (__typeof__(lval)) _res; \
5077  } while (0)
5078 
/* As CALL_FN_W_11W but for 12 word-sized args (mips64): args 9-12 go
   into a 32-byte stack area. */
5079 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5080  arg6,arg7,arg8,arg9,arg10, \
5081  arg11,arg12) \
5082  do { \
5083  volatile OrigFn _orig = (orig); \
5084  volatile unsigned long _argvec[13]; \
5085  volatile unsigned long _res; \
5086  _argvec[0] = (unsigned long)_orig.nraddr; \
5087  _argvec[1] = (unsigned long)(arg1); \
5088  _argvec[2] = (unsigned long)(arg2); \
5089  _argvec[3] = (unsigned long)(arg3); \
5090  _argvec[4] = (unsigned long)(arg4); \
5091  _argvec[5] = (unsigned long)(arg5); \
5092  _argvec[6] = (unsigned long)(arg6); \
5093  _argvec[7] = (unsigned long)(arg7); \
5094  _argvec[8] = (unsigned long)(arg8); \
5095  _argvec[9] = (unsigned long)(arg9); \
5096  _argvec[10] = (unsigned long)(arg10); \
5097  _argvec[11] = (unsigned long)(arg11); \
5098  _argvec[12] = (unsigned long)(arg12); \
5099  __asm__ volatile( \
5100  "dsubu $29, $29, 32\n\t" \
5101  "ld $4, 72(%1)\n\t" \
5102  "sd $4, 0($29)\n\t" \
5103  "ld $4, 80(%1)\n\t" \
5104  "sd $4, 8($29)\n\t" \
5105  "ld $4, 88(%1)\n\t" \
5106  "sd $4, 16($29)\n\t" \
5107  "ld $4, 96(%1)\n\t" \
5108  "sd $4, 24($29)\n\t" \
5109  "ld $4, 8(%1)\n\t" \
5110  "ld $5, 16(%1)\n\t" \
5111  "ld $6, 24(%1)\n\t" \
5112  "ld $7, 32(%1)\n\t" \
5113  "ld $8, 40(%1)\n\t" \
5114  "ld $9, 48(%1)\n\t" \
5115  "ld $10, 56(%1)\n\t" \
5116  "ld $11, 64(%1)\n\t" \
5117  "ld $25, 0(%1)\n\t" /* target->t9 */ \
5118  VALGRIND_CALL_NOREDIR_T9 \
5119  "daddu $29, $29, 32\n\t" \
5120  "move %0, $2\n" \
5121  : /*out*/ "=r" (_res) \
5122  : /*in*/ "r" (&_argvec[0]) \
5123  : /*trash*/ "memory", __CALLER_SAVED_REGS \
5124  ); \
5125  lval = (__typeof__(lval)) _res; \
5126  } while (0)
5127 
5128 #endif /* PLAT_mips64_linux */
5129 
5130 
5131 /* ------------------------------------------------------------------ */
5132 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
5133 /* */
5134 /* ------------------------------------------------------------------ */
5135 
5136 /* Some request codes. There are many more of these, but most are not
5137  exposed to end-user view. These are the public ones, all of the
5138  form 0x1000 + small_number.
5139 
5140  Core ones are in the range 0x00000000--0x0000ffff. The non-public
5141  ones start at 0x2000.
5142 */
5143 
5144 /* These macros are used by tools -- they must be public, but don't
5145  embed them into other programs. */
/* Build the base request code for a tool from two identifying
   characters (e.g. 'M','C' for Memcheck) packed into the top two
   bytes.  The operands are widened to unsigned int BEFORE shifting:
   ((a)&0xff) can be as large as 0xff, and left-shifting that into
   bit 31 of a signed int is undefined behaviour in C. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((((unsigned int)((a)&0xff)) << 24) | (((unsigned int)((b)&0xff)) << 16))
/* True iff request code v belongs to the tool identified by (a,b);
   only the top 16 bits of v are compared. */
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
5150 
5151 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
5152  This enum comprises an ABI exported by Valgrind to programs
5153  which use client requests. DO NOT CHANGE THE ORDER OF THESE
5154  ENTRIES, NOR DELETE ANY -- add new ones at the end. */
/* NOTE(review): the Vg_ClientRequest enumerator definitions that
   normally appear between these comments were elided by the
   documentation generator in this listing; only the explanatory
   comments survive.  Consult the original valgrind.h for the full,
   order-sensitive ABI list. */
5155 typedef
5158 
5159  /* These allow any function to be called from the simulated
5160  CPU but run on the real CPU. Nb: the first arg passed to
5161  the function is always the ThreadId of the running
5162  thread! So CLIENT_CALL0 actually requires a 1 arg
5163  function, etc. */
5168 
5169  /* Can be useful in regression testing suites -- eg. can
5170  send Valgrind's output to /dev/null and still count
5171  errors. */
5173 
5174  /* Allows the client program and/or gdbserver to execute a monitor
5175  command. */
5177 
5178  /* These are useful and can be interpreted by any tool that
5179  tracks malloc() et al, by using vg_replace_malloc.c. */
5183  /* Memory pool support. */
5192 
5193  /* Allow printfs to valgrind log. */
5194  /* The first two pass the va_list argument by value, which
5195  assumes it is the same size as or smaller than a UWord,
5196  which generally isn't the case. Hence are deprecated.
5197  The second two pass the vargs by reference and so are
5198  immune to this problem. */
5199  /* both :: char* fmt, va_list vargs (DEPRECATED) */
5202  /* both :: char* fmt, va_list* vargs */
5205 
5206  /* Stack support. */
5210 
5211  /* Wine support */
5213 
5214  /* Querying of debug info. */
5216 
5217  /* Disable/enable error reporting level. Takes a single
5218  Word arg which is the delta to this thread's error
5219  disablement indicator. Hence 1 disables or further
5220  disables errors, and -1 moves back towards enablement.
5221  Other values are not allowed. */
5223 
5224  /* Initialise IR injection */
5227 
/* Non-GCC compilers do not understand GCC's __extension__ keyword,
   so define it away for them. */
5228 #if !defined(__GNUC__)
5229 # define __extension__ /* */
5230 #endif
5231 
5232 
5233 /* Returns the number of Valgrinds this code is running under. That
5234  is, 0 if running natively, 1 if running under Valgrind, 2 if
5235  running under Valgrind which is running under another Valgrind,
5236  etc. */
/* Expression form: 0 when running natively (the default-return
   argument), otherwise the Valgrind nesting depth.
   NOTE(review): dropped the stray trailing '\' after the final
   argument list -- it made the macro body absorb the following
   (blank) line. */
5237 #define RUNNING_ON_VALGRIND \
5238  (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */, \
5239  VG_USERREQ__RUNNING_ON_VALGRIND, \
5240  0, 0, 0, 0, 0)
5241 
5242 
5243 /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
5244  _qzz_len - 1]. Useful if you are debugging a JITter or some such,
5245  since it provides a way to make sure valgrind will retranslate the
5246  invalidated area. Returns no value. */
/* Statement form: forwards the address/length pair unchanged to the
   DISCARD_TRANSLATIONS request; yields no value. */
5247 #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
5248  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
5249  _qzz_addr, _qzz_len, 0, 0, 0)
5250 
5251 
5252 /* These requests are for getting Valgrind itself to print something.
5253  Possibly with a backtrace. This is a really ugly hack. The return value
5254  is the number of characters printed, excluding the "**<pid>** " part at the
5255  start and the backtrace (if present). */
5256 
/* printf-style message into the Valgrind log.  Returns the number of
   characters printed; compiles to "return 0" under NVALGRIND, and the
   client request's default-return argument is 0 when running natively.
   The va_list is passed by reference (address of vargs).
   NOTE(review): in the guard below, '&&' binds tighter than '||', so
   the attribute prototype is declared for every __GNUC__ build
   regardless of _MSC_VER -- confirm that precedence is intended.
   NOTE(review): the request-code argument of the client-request calls
   appears to have been elided by the documentation generator in this
   listing. */
5257 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
5258 /* Modern GCC will optimize the static routine out if unused,
5259  and unused attribute will shut down warnings about it. */
5260 static int VALGRIND_PRINTF(const char *format, ...)
5261  __attribute__((format(__printf__, 1, 2), __unused__));
5262 #endif
5263 static int
5264 #if defined(_MSC_VER)
5265 __inline
5266 #endif
5267 VALGRIND_PRINTF(const char *format, ...)
5268 {
5269 #if defined(NVALGRIND)
5270  return 0;
5271 #else /* NVALGRIND */
5272 #if defined(_MSC_VER) || defined(__MINGW64__)
5273  uintptr_t _qzz_res;
5274 #else
5275  unsigned long _qzz_res;
5276 #endif
5277  va_list vargs;
5278  va_start(vargs, format);
5279 #if defined(_MSC_VER) || defined(__MINGW64__)
5280  _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5282  (uintptr_t)format,
5283  (uintptr_t)&vargs,
5284  0, 0, 0);
5285 #else
5286  _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5288  (unsigned long)format,
5289  (unsigned long)&vargs,
5290  0, 0, 0);
5291 #endif
5292  va_end(vargs);
5293  return (int)_qzz_res;
5294 #endif /* NVALGRIND */
5295 }
5296 
/* As VALGRIND_PRINTF, but Valgrind also emits a backtrace after the
   message.  Returns the number of characters printed (0 under
   NVALGRIND or when running natively).
   NOTE(review): same '&&'/'||' precedence question as the guard on
   VALGRIND_PRINTF above, and the request-code argument lines of the
   client-request calls appear elided in this listing. */
5297 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
5298 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
5299  __attribute__((format(__printf__, 1, 2), __unused__));
5300 #endif
5301 static int
5302 #if defined(_MSC_VER)
5303 __inline
5304 #endif
5305 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
5306 {
5307 #if defined(NVALGRIND)
5308  return 0;
5309 #else /* NVALGRIND */
5310 #if defined(_MSC_VER) || defined(__MINGW64__)
5311  uintptr_t _qzz_res;
5312 #else
5313  unsigned long _qzz_res;
5314 #endif
5315  va_list vargs;
5316  va_start(vargs, format);
5317 #if defined(_MSC_VER) || defined(__MINGW64__)
5318  _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5320  (uintptr_t)format,
5321  (uintptr_t)&vargs,
5322  0, 0, 0);
5323 #else
5324  _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5326  (unsigned long)format,
5327  (unsigned long)&vargs,
5328  0, 0, 0);
5329 #endif
5330  va_end(vargs);
5331  return (int)_qzz_res;
5332 #endif /* NVALGRIND */
5333 }
5334 
5335 
5336 /* These requests allow control to move from the simulated CPU to the
5337  real CPU, calling an arbitrary function.
5338 
5339  Note that the current ThreadId is inserted as the first argument.
5340  So this call:
5341 
5342  VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
5343 
5344  requires f to have this signature:
5345 
5346  Word f(Word tid, Word arg1, Word arg2)
5347 
5348  where "Word" is a word-sized type.
5349 
5350  Note that these client requests are not entirely reliable. For example,
5351  if you call a function with them that subsequently calls printf(),
5352  there's a high chance Valgrind will crash. Generally, your prospects of
5353  these working are made higher if the called function does not refer to
5354  any global variables, and does not refer to any libc or other functions
5355  (printf et al). Any kind of entanglement with libc or dynamic linking is
5356  likely to have a bad outcome, for tricky reasons which we've grappled
5357  with a lot in the past.
5358 */
/* Run _qyy_fn on the real CPU.  As explained in the comment above,
   the running ThreadId is prepended as the callee's first argument,
   so CALLn needs an (n+1)-argument function; unused request slots are
   padded with 0 and the default return value is 0. */
5359 #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
5360  VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5361  VG_USERREQ__CLIENT_CALL0, \
5362  _qyy_fn, \
5363  0, 0, 0, 0)
5364 
5365 #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
5366  VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5367  VG_USERREQ__CLIENT_CALL1, \
5368  _qyy_fn, \
5369  _qyy_arg1, 0, 0, 0)
5370 
5371 #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
5372  VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5373  VG_USERREQ__CLIENT_CALL2, \
5374  _qyy_fn, \
5375  _qyy_arg1, _qyy_arg2, 0, 0)
5376 
5377 #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
5378  VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5379  VG_USERREQ__CLIENT_CALL3, \
5380  _qyy_fn, \
5381  _qyy_arg1, _qyy_arg2, \
5382  _qyy_arg3, 0)
5383 
5384 
5385 /* Counts the number of errors that have been recorded by a tool. Nb:
5386  the tool must record the errors with VG_(maybe_record_error)() or
5387  VG_(unique_error)() for them to be counted. */
/* Expression form: yields the tool's recorded error count, or 0 (the
   default-return argument) when not running under Valgrind. */
5388 #define VALGRIND_COUNT_ERRORS \
5389  (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
5390  0 /* default return */, \
5391  VG_USERREQ__COUNT_ERRORS, \
5392  0, 0, 0, 0, 0)
5393 
5394 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
5395  when heap blocks are allocated in order to give accurate results. This
5396  happens automatically for the standard allocator functions such as
5397  malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
5398  delete[], etc.
5399 
5400  But if your program uses a custom allocator, this doesn't automatically
5401  happen, and Valgrind will not do as well. For example, if you allocate
5402  superblocks with mmap() and then allocate chunks of the superblocks, all
5403  Valgrind's observations will be at the mmap() level and it won't know that
5404  the chunks should be considered separate entities. In Memcheck's case,
5405  that means you probably won't get heap block overrun detection (because
5406  there won't be redzones marked as unaddressable) and you definitely won't
5407  get any leak detection.
5408 
5409  The following client requests allow a custom allocator to be annotated so
5410  that it can be handled accurately by Valgrind.
5411 
5412  VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
5413  by a malloc()-like function. For Memcheck (an illustrative case), this
5414  does two things:
5415 
5416  - It records that the block has been allocated. This means any addresses
5417  within the block mentioned in error messages will be
5418  identified as belonging to the block. It also means that if the block
5419  isn't freed it will be detected by the leak checker.
5420 
5421  - It marks the block as being addressable and undefined (if 'is_zeroed' is
5422  not set), or addressable and defined (if 'is_zeroed' is set). This
5423  controls how accesses to the block by the program are handled.
5424 
5425  'addr' is the start of the usable block (ie. after any
5426  redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
5427  can apply redzones -- these are blocks of padding at the start and end of
5428  each block. Adding redzones is recommended as it makes it much more likely
5429  Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
5430  zeroed (or filled with another predictable value), as is the case for
5431  calloc().
5432 
5433  VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
5434  heap block -- that will be used by the client program -- is allocated.
5435  It's best to put it at the outermost level of the allocator if possible;
5436  for example, if you have a function my_alloc() which calls
5437  internal_alloc(), and the client request is put inside internal_alloc(),
5438  stack traces relating to the heap block will contain entries for both
5439  my_alloc() and internal_alloc(), which is probably not what you want.
5440 
5441  For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
5442  custom blocks from within a heap block, B, that has been allocated with
5443  malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
5444  -- the custom blocks will take precedence.
5445 
5446  VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
5447  Memcheck, it does two things:
5448 
5449  - It records that the block has been deallocated. This assumes that the
5450  block was annotated as having been allocated via
5451  VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
5452 
5453  - It marks the block as being unaddressable.
5454 
5455  VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
5456  heap block is deallocated.
5457 
5458  VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
5459  Memcheck, it does four things:
5460 
5461  - It records that the size of a block has been changed. This assumes that
5462  the block was annotated as having been allocated via
5463  VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
5464 
5465  - If the block shrank, it marks the freed memory as being unaddressable.
5466 
5467  - If the block grew, it marks the new area as undefined and defines a red
5468  zone past the end of the new block.
5469 
5470  - The V-bits of the overlap between the old and the new block are preserved.
5471 
5472  VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
5473  and before deallocation of the old block.
5474 
5475  In many cases, these three client requests will not be enough to get your
5476  allocator working well with Memcheck. More specifically, if your allocator
5477  writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
5478  will be necessary to mark the memory as addressable just before the zeroing
5479  occurs, otherwise you'll get a lot of invalid write errors. For example,
5480  you'll need to do this if your allocator recycles freed blocks, but it
5481  zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
5482  Alternatively, if your allocator reuses freed blocks for allocator-internal
5483  data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
5484 
5485  Really, what's happening is a blurring of the lines between the client
5486  program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
5487  memory should be considered unaddressable to the client program, but the
5488  allocator knows more than the rest of the client program and so may be able
5489  to safely access it. Extra client requests are necessary for Valgrind to
5490  understand the distinction between the allocator and the rest of the
5491  program.
5492 
5493  Ignored if addr == 0.
5494 */
/* Announce a custom allocation: records [addr, addr+sizeB) as an
   allocated heap block. See the long comment above for the meaning of
   rzB and is_zeroed. Ignored if addr == 0. */
5495 #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
5496  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
5497  addr, sizeB, rzB, is_zeroed, 0)
5498 
5499 /* Announce an in-place resize of the block at 'addr', from oldSizeB to
5500  newSizeB bytes. See the comment for VALGRIND_MALLOCLIKE_BLOCK for
 details. Ignored if addr == 0.
5501 */
5502 #define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
5503  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
5504  addr, oldSizeB, newSizeB, rzB, 0)
5505 
5506 /* Announce that the block at 'addr' has been deallocated; rzB is the
5507  redzone size (as for VALGRIND_MALLOCLIKE_BLOCK). See the comment for
 VALGRIND_MALLOCLIKE_BLOCK for details. Ignored if addr == 0.
5508 */
5509 #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
5510  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, \
5511  addr, rzB, 0, 0, 0)
5512 
5513 /* Create a memory pool anchored at address 'pool'. rzB and is_zeroed
 have the same meaning as for VALGRIND_MALLOCLIKE_BLOCK, applied to the
 pieces later allocated from the pool. */
5514 #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
5515  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
5516  pool, rzB, is_zeroed, 0, 0)
5517 
5518 /* Destroy the memory pool anchored at address 'pool'. */
5519 #define VALGRIND_DESTROY_MEMPOOL(pool) \
5520  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL, \
5521  pool, 0, 0, 0, 0)
5522 
5523 /* Associate the piece [addr, addr+size) with the pool 'pool' -- the
 mempool analogue of VALGRIND_MALLOCLIKE_BLOCK. */
5524 #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
5525  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, \
5526  pool, addr, size, 0, 0)
5527 
5528 /* Disassociate the piece starting at 'addr' from the pool 'pool' --
 the mempool analogue of VALGRIND_FREELIKE_BLOCK. */
5529 #define VALGRIND_MEMPOOL_FREE(pool, addr) \
5530  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, \
5531  pool, addr, 0, 0, 0)
5532 
5533 /* Disassociate any pieces of 'pool' lying outside the range
 [addr, addr+size). */
5534 #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
5535  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, \
5536  pool, addr, size, 0, 0)
5537 
5538 /* Announce that the pool previously anchored at address poolA has
 moved to anchor address poolB. (The previous comment here was an
 accidental duplicate of VALGRIND_MEMPOOL_CHANGE's and did not match
 this macro's (poolA, poolB) arguments.) */
5539 #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
5540  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL, \
5541  poolA, poolB, 0, 0, 0)
5542 
5543 /* Resize and/or move a piece associated with a memory pool: the piece
 formerly at addrA now lives at addrB with size 'size'. */
5544 #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
5545  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, \
5546  pool, addrA, addrB, size, 0)
5547 
5548 /* Return 1 if a mempool exists, else 0. (Evaluates to the default 0
 when not running under Valgrind.) */
5549 #define VALGRIND_MEMPOOL_EXISTS(pool) \
5550  (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5551  VG_USERREQ__MEMPOOL_EXISTS, \
5552  pool, 0, 0, 0, 0)
5553 
5554 /* Mark a piece of memory as being a stack. Returns a stack id.
 'start' and 'end' delimit the stack's address range. (Evaluates to the
 default 0 when not running under Valgrind.) */
5555 #define VALGRIND_STACK_REGISTER(start, end) \
5556  (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5557  VG_USERREQ__STACK_REGISTER, \
5558  start, end, 0, 0, 0)
5559 
5560 /* Unmark the piece of memory associated with a stack id (as returned
5561  by VALGRIND_STACK_REGISTER) as being a stack. */
5562 #define VALGRIND_STACK_DEREGISTER(id) \
5563  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
5564  id, 0, 0, 0, 0)
5565 
5566 /* Change the start and end address of the registered stack 'id'. */
5567 #define VALGRIND_STACK_CHANGE(id, start, end) \
5568  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE, \
5569  id, start, end, 0, 0)
5570 
5571 /* Load PDB debug info for Wine PE image_map. (Arguments are passed
 through to the tool unchanged; presumably 'fd' refers to the mapped PE
 image and 'delta' is a load-address offset -- confirm against the
 Valgrind core before relying on this.) */
5572 #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
5573  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
5574  fd, ptr, total_size, delta, 0)
5575 
5576 /* Map a code address to a source file name and line number. buf64
5577  must point to a 64-byte buffer in the caller's address space. The
5578  result will be dumped in there and is guaranteed to be zero
5579  terminated. If no info is found, the first byte is set to zero.
 (Evaluates to the default 0 when not running under Valgrind, in which
 case the buffer is left untouched.) */
5580 #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
5581  (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5582  VG_USERREQ__MAP_IP_TO_SRCLOC, \
5583  addr, buf64, 0, 0, 0)
5584 
5585 /* Disable error reporting for this thread. Behaves in a stack like
5586  way, so you can safely call this multiple times provided that
5587  VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
5588  to re-enable reporting. The first call of this macro disables
5589  reporting. Subsequent calls have no effect except to increase the
5590  number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
5591  reporting. Child threads do not inherit this setting from their
5592  parents -- they are always created with reporting enabled. */
5593 #define VALGRIND_DISABLE_ERROR_REPORTING \
5594  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
5595  1, 0, 0, 0, 0)

5597 /* Re-enable error reporting, as per comments on
5598  VALGRIND_DISABLE_ERROR_REPORTING. Both macros adjust the same
 per-thread disablement counter (+1 to disable, -1 to enable). */
5599 #define VALGRIND_ENABLE_ERROR_REPORTING \
5600  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
5601  -1, 0, 0, 0, 0)
5602 
5603 /* Execute a monitor command from the client program.
5604  If a connection is opened with GDB, the output will be sent
5605  according to the output mode set for vgdb.
5606  If no connection is opened, output will go to the log output.
5607  Returns 1 if command not recognised, 0 otherwise. (Evaluates to the
 default 0 when not running under Valgrind.) */
5608 #define VALGRIND_MONITOR_COMMAND(command) \
5609  VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
5610  command, 0, 0, 0, 0)
5611 
5612 
/* Tidy up: remove the platform-selection macros defined earlier in this
   header so they do not leak into code that includes it. */
5613 #undef PLAT_x86_darwin
5614 #undef PLAT_amd64_darwin
5615 #undef PLAT_x86_win32
5616 #undef PLAT_amd64_win64
5617 #undef PLAT_x86_linux
5618 #undef PLAT_amd64_linux
5619 #undef PLAT_ppc32_linux
5620 #undef PLAT_ppc64_linux
5621 #undef PLAT_arm_linux
5622 #undef PLAT_s390x_linux
5623 #undef PLAT_mips32_linux
5624 #undef PLAT_mips64_linux
5625 
5626 #endif /* __VALGRIND_H */
5627 
5628 // clang-format on
5629 // NOLINTEND
static int VALGRIND_PRINTF_BACKTRACE(const char *format,...)
Definition: valgrind.h:5305
Vg_ClientRequest
Definition: valgrind.h:5156
@ VG_USERREQ__DESTROY_MEMPOOL
Definition: valgrind.h:5185
@ VG_USERREQ__MAP_IP_TO_SRCLOC
Definition: valgrind.h:5215
@ VG_USERREQ__LOAD_PDB_DEBUGINFO
Definition: valgrind.h:5212
@ VG_USERREQ__VEX_INIT_FOR_IRI
Definition: valgrind.h:5225
@ VG_USERREQ__PRINTF_BACKTRACE
Definition: valgrind.h:5201
@ VG_USERREQ__GDB_MONITOR_COMMAND
Definition: valgrind.h:5176
@ VG_USERREQ__MEMPOOL_ALLOC
Definition: valgrind.h:5186
@ VG_USERREQ__RESIZEINPLACE_BLOCK
Definition: valgrind.h:5181
@ VG_USERREQ__MALLOCLIKE_BLOCK
Definition: valgrind.h:5180
@ VG_USERREQ__COUNT_ERRORS
Definition: valgrind.h:5172
@ VG_USERREQ__STACK_REGISTER
Definition: valgrind.h:5207
@ VG_USERREQ__MEMPOOL_CHANGE
Definition: valgrind.h:5190
@ VG_USERREQ__PRINTF_VALIST_BY_REF
Definition: valgrind.h:5203
@ VG_USERREQ__RUNNING_ON_VALGRIND
Definition: valgrind.h:5156
@ VG_USERREQ__CLIENT_CALL0
Definition: valgrind.h:5164
@ VG_USERREQ__DISCARD_TRANSLATIONS
Definition: valgrind.h:5157
@ VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF
Definition: valgrind.h:5204
@ VG_USERREQ__FREELIKE_BLOCK
Definition: valgrind.h:5182
@ VG_USERREQ__CREATE_MEMPOOL
Definition: valgrind.h:5184
@ VG_USERREQ__MOVE_MEMPOOL
Definition: valgrind.h:5189
@ VG_USERREQ__CLIENT_CALL3
Definition: valgrind.h:5167
@ VG_USERREQ__MEMPOOL_TRIM
Definition: valgrind.h:5188
@ VG_USERREQ__CLIENT_CALL2
Definition: valgrind.h:5166
@ VG_USERREQ__PRINTF
Definition: valgrind.h:5200
@ VG_USERREQ__CHANGE_ERR_DISABLEMENT
Definition: valgrind.h:5222
@ VG_USERREQ__STACK_CHANGE
Definition: valgrind.h:5209
@ VG_USERREQ__STACK_DEREGISTER
Definition: valgrind.h:5208
@ VG_USERREQ__MEMPOOL_FREE
Definition: valgrind.h:5187
@ VG_USERREQ__MEMPOOL_EXISTS
Definition: valgrind.h:5191
@ VG_USERREQ__CLIENT_CALL1
Definition: valgrind.h:5165
static int VALGRIND_PRINTF(const char *format,...)
Definition: valgrind.h:5267
#define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)
Definition: valgrind.h:200