/* local_valgrind.h -- local copy of Valgrind's valgrind.h client-request
   header, extracted from a generated ("Go to the documentation of this
   file") listing; embedded listing line numbers may remain in unedited
   sections below. */
1 /* -*- c -*-
2  ----------------------------------------------------------------
3 
4  Notice that the following BSD-style license applies to this one
5  file (valgrind.h) only. The rest of Valgrind is licensed under the
6  terms of the GNU General Public License, version 2, unless
7  otherwise indicated. See the COPYING file in the source
8  distribution for details.
9 
10  ----------------------------------------------------------------
11 
12  This file is part of Valgrind, a dynamic binary instrumentation
13  framework.
14 
15  Copyright (C) 2000-2013 Julian Seward. All rights reserved.
16 
17  Redistribution and use in source and binary forms, with or without
18  modification, are permitted provided that the following conditions
19  are met:
20 
21  1. Redistributions of source code must retain the above copyright
22  notice, this list of conditions and the following disclaimer.
23 
24  2. The origin of this software must not be misrepresented; you must
25  not claim that you wrote the original software. If you use this
26  software in a product, an acknowledgment in the product
27  documentation would be appreciated but is not required.
28 
29  3. Altered source versions must be plainly marked as such, and must
30  not be misrepresented as being the original software.
31 
32  4. The name of the author may not be used to endorse or promote
33  products derived from this software without specific prior written
34  permission.
35 
36  THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37  OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39  ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40  DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41  DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42  GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43  INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44  WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45  NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
47 
48  ----------------------------------------------------------------
49 
50  Notice that the above BSD-style license applies to this one file
51  (valgrind.h) only. The entire rest of Valgrind is licensed under
52  the terms of the GNU General Public License, version 2. See the
53  COPYING file in the source distribution for details.
54 
55  ----------------------------------------------------------------
56 */
57 
58 
59 /* This file is for inclusion into client (your!) code.
60 
61  You can use these macros to manipulate and query Valgrind's
62  execution inside your own programs.
63 
64  The resulting executables will still run without Valgrind, just a
65  little bit more slowly than they otherwise would, but otherwise
66  unchanged. When not running on valgrind, each client request
67  consumes very few (eg. 7) instructions, so the resulting performance
68  loss is negligible unless you plan to execute client requests
69  millions of times per second. Nevertheless, if that is still a
70  problem, you can compile with the NVALGRIND symbol defined (gcc
71  -DNVALGRIND) so that client requests are not even compiled in. */
72 
/* Multiple-inclusion guard.  NOTE(review): the matching
   #endif (__VALGRIND_H) is presumably at the very end of the full
   header, outside this excerpt -- confirm when viewing the whole file. */
#ifndef __VALGRIND_H
#define __VALGRIND_H


/* ------------------------------------------------------------------ */
/* VERSION NUMBER OF VALGRIND                                         */
/* ------------------------------------------------------------------ */

/* Specify Valgrind's version number, so that user code can
   conditionally compile based on our version number.  Note that these
   were introduced at version 3.6 and so do not exist in version 3.5
   or earlier.  The recommended way to use them to check for "version
   X.Y or later" is (eg)

#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__)   \
    && (__VALGRIND_MAJOR__ > 3                                   \
        || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    8


/* Needed for the varargs used by the CALL_FN_ machinery. */
#include <stdarg.h>
96 
97 /* Nb: this file might be included in a file compiled with -ansi. So
98  we can't use C++ style "//" comments nor the "asm" keyword (instead
99  use "__asm__"). */
100 
101 /* Derive some tags indicating what the target platform is. Note
102  that in this file we're using the compiler's CPP symbols for
103  identifying architectures, which are different to the ones we use
104  within the rest of Valgrind. Note, __powerpc__ is active for both
105  32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106  latter (on Linux, that is).
107 
108  Misc note: how to find out what's predefined in gcc by default:
109  gcc -Wp,-dM somefile.c
110 */
/* Exactly one PLAT_* tag is defined by the #if chain below; start by
   undefining all of them so stale definitions cannot leak in from
   elsewhere. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64_linux
#undef PLAT_arm_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux


#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif defined(__MINGW32__) || defined(__CYGWIN32__) \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif defined(__MINGW64__) || (defined(_WIN64) && defined(_M_X64))
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
#  define PLAT_ppc64_linux 1
#elif defined(__linux__) && defined(__arm__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__)
/* __mips evaluates to 64 for 64-bit MIPS targets, smaller otherwise. */
#  if (__mips==64)
#    define PLAT_mips64_linux 1
#  else
#    define PLAT_mips32_linux 1
#  endif
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms.  */
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif
159 
160 
161 /* ------------------------------------------------------------------ */
162 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
163 /* in here of use to end-users -- skip to the next section. */
164 /* ------------------------------------------------------------------ */
165 
166 /*
167  * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
168  * request. Accepts both pointers and integers as arguments.
169  *
170  * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
171  * client request that does not return a value.
172 
173  * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
174  * client request and whose value equals the client request result. Accepts
175  * both pointers and integers as arguments. Note that such calls are not
176  * necessarily pure functions -- they may have side effects.
177  */
178 
/* Statement form: invoke a client request and assign its result to
   _zzq_rlval.  Simply wraps VALGRIND_DO_CLIENT_REQUEST_EXPR in a
   do/while(0) so it behaves as a single statement. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* Statement form for requests whose result is not needed: the EXPR
   form is evaluated with default 0 and the value explicitly
   discarded via the (void) cast. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                           _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
191 
192 #if defined(NVALGRIND)
193 
/* Define NVALGRIND to completely remove the Valgrind magic sequence
   from the compiled code (analogous to NDEBUG's effects on
   assert()).  In that case the expression collapses to the default
   value alone: the request code and arguments do not appear in the
   expansion at all, so they are never evaluated. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)
201 
202 #else /* ! NVALGRIND */
203 
204 /* The following defines the magic code sequences which the JITter
205  spots and handles magically. Don't look too closely at them as
206  they will rot your brain.
207 
208  The assembly code sequences for all architectures is in this one
209  file. This is because this file must be stand-alone, and we don't
210  want to have multiple files.
211 
212  For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
213  value gets put in the return slot, so that everything works when
214  this is executed not under Valgrind. Args are passed in a memory
215  block, and so there's no intrinsic limit to the number that could
216  be passed, but it's currently five.
217 
218  The macro args are:
219  _zzq_rlval result lvalue
220  _zzq_default default value (result returned when running on real CPU)
221  _zzq_request request code
222  _zzq_arg1..5 request params
223 
224  The other two macros are used to support function wrapping, and are
225  a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
226  guest's NRADDR pseudo-register and whatever other information is
227  needed to safely run the call original from the wrapper: on
228  ppc64-linux, the R2 value at the divert point is also needed. This
229  information is abstracted into a user-visible type, OrigFn.
230 
231  VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
232  guest, but guarantees that the branch instruction will not be
233  redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
234  branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
235  complete inline asm, since it needs to be combined with more magic
236  inline asm stuff to be useful.
237 */
238 
239 /* ------------------------- x86-{linux,darwin} ---------------- */
240 
#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  (defined(PLAT_x86_win32) && defined(__GNUC__))

/* Context captured by VALGRIND_GET_NR_CONTEXT: on x86 only the guest
   NRADDR pseudo-register is needed. */
typedef
   struct { 
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotating %edi by 3+13+29+19 = 64 bits is a net no-op on a real CPU
   (64 mod 32 == 0), but Valgrind's JIT recognises this exact
   instruction sequence as the prefix of a special instruction. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3,  %%edi ; roll $13, %%edi\n\t"      \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

/* Marshal the request code and five args into a block, point %eax at
   it, and emit the magic sequence.  "xchgl %ebx,%ebx" after the
   preamble encodes "client request"; on a real CPU it is a no-op, so
   %edx keeps the preloaded default (the "0" constraint ties it to
   the output) as the result. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

/* "xchgl %ecx,%ecx" after the preamble fetches the guest NRADDR
   pseudo-register into %eax.  On a real CPU the sequence is a no-op,
   so __addr is whatever the (uninitialised) register holds -- only
   meaningful when running under Valgrind. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm text fragment (NOT a complete asm statement -- it is spliced
   into larger CALL_FN_ asm blocks): a call through %eax that the JIT
   guarantees not to redirect. */
#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

/* "xchgl %edi,%edi" marker after the preamble; a no-op on a real
   CPU, recognised specially by the JIT (IR injection). */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgl %%edi,%%edi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
303 
304 /* ------------------------- x86-Win32 ------------------------- */
305 
#if defined(PLAT_x86_win32) && !defined(__GNUC__)

/* Context captured by VALGRIND_GET_NR_CONTEXT: just the guest NRADDR. */
typedef
   struct { 
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* MSVC inline-asm spelling of the magic prefix: the same four
   rotates of EDI (3+13+29+19 = 64 => net no-op on a real CPU) that
   the JIT recognises. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
   __asm rol edi, 3  __asm rol edi, 13                            \
   __asm rol edi, 29 __asm rol edi, 19

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),    \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),        \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),           \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

/* Out-of-line helper (MSVC __asm blocks cannot appear inside an
   expression): same protocol as the GCC version -- EAX = &args,
   EDX = default, "xchg ebx,ebx" performs the request, result taken
   from EDX. */
static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

/* "xchg ecx,ecx" after the preamble fetches guest_NRADDR into EAX. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE                        \
            /* %EAX = guest_NRADDR */                             \
            __asm xchg ecx,ecx                                    \
            __asm mov __addr, eax                                 \
    }                                                             \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Expands to the bare token ERROR -- presumably to force a compile
   error on any use, since no MSVC implementation is provided here. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

/* "xchg edi,edi" marker after the preamble (IR injection). */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE                        \
            __asm xchg edi,edi                                    \
    }                                                             \
 } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */
376 
377 /* ------------------------ amd64-{linux,darwin} --------------- */
378 
#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin)

/* Context captured by VALGRIND_GET_NR_CONTEXT: just the guest NRADDR. */
typedef
   struct { 
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotating %rdi by 3+13+61+51 = 128 bits is a net no-op on a real
   CPU (128 mod 64 == 0), but the JIT recognises this exact sequence
   as the prefix of a Valgrind special instruction. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3,  %%rdi ; rolq $13, %%rdi\n\t"      \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

/* As for x86, but 64-bit: %rax points at the six-word args block,
   "xchgq %rbx,%rbx" performs the request, and %rdx (preloaded with
   the default via the "0" constraint) carries the result. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long long int _zzq_args[6];              \
       volatile unsigned long long int _zzq_result;               \
       _zzq_args[0] = (unsigned long long int)(_zzq_request);     \
       _zzq_args[1] = (unsigned long long int)(_zzq_arg1);        \
       _zzq_args[2] = (unsigned long long int)(_zzq_arg2);        \
       _zzq_args[3] = (unsigned long long int)(_zzq_arg3);        \
       _zzq_args[4] = (unsigned long long int)(_zzq_arg4);        \
       _zzq_args[5] = (unsigned long long int)(_zzq_arg5);        \
       __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE            \
                        /* %RDX = client_request ( %RAX ) */      \
                        "xchgq %%rbx,%%rbx"                       \
                        : "=d" (_zzq_result)                      \
                        : "a" (&_zzq_args[0]), "0" (_zzq_default) \
                        : "cc", "memory"                          \
                       );                                         \
       _zzq_result;                                               \
    })

/* "xchgq %rcx,%rcx" after the preamble fetches guest_NRADDR into %rax. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned long long int __addr;                     \
      __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE             \
                       /* %RAX = guest_NRADDR */                  \
                       "xchgq %%rcx,%%rcx"                        \
                       : "=a" (__addr)                            \
                       :                                          \
                       : "cc", "memory"                           \
                      );                                          \
      _zzq_orig->nraddr = __addr;                                 \
    }

/* Asm text fragment only: a call through %rax that the JIT will not
   redirect. */
#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

/* "xchgq %rdi,%rdi" marker after the preamble (IR injection). */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgq %%rdi,%%rdi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
440 
441 /* ------------------------ ppc32-linux ------------------------ */
442 
#if defined(PLAT_ppc32_linux)

/* Context captured by VALGRIND_GET_NR_CONTEXT: just the guest NRADDR. */
typedef
   struct { 
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Magic marker sequence (rlwinm operations on r0) recognised by
   Valgrind's JIT as the prefix of a special instruction. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rlwinm 0,0,3,0,0  ; rlwinm 0,0,13,0,0\n\t"  \
                     "rlwinm 0,0,29,0,0 ; rlwinm 0,0,19,0,0\n\t"

/* Default goes in r3 and args pointer in r4 before the preamble;
   "or 1,1,1" encodes "client request"; the result comes back in r3.
   The "b" constraints keep operands out of r0 (which cannot serve
   as a base/source here). */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({         unsigned int  _zzq_args[6];                          \
             unsigned int  _zzq_result;                           \
             unsigned int* _zzq_ptr;                              \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

/* "or 2,2,2" after the preamble fetches guest_NRADDR into r3. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm text fragment only: branch-and-link through r11, never
   redirected by the JIT. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

/* "or 5,5,5" marker after the preamble (IR injection).
   NOTE(review): unlike the x86/amd64 ports, no clobber list is
   declared here -- confirm against upstream valgrind.h. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc32_linux */
509 
510 /* ------------------------ ppc64-linux ------------------------ */
511 
#if defined(PLAT_ppc64_linux)

/* On ppc64 the wrapper context needs both the NRADDR and the R2
   (TOC pointer) value at the divert point. */
typedef
   struct { 
      unsigned long long int nraddr; /* where's the code? */
      unsigned long long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

/* Magic marker sequence (rotldi operations on r0) recognised by
   Valgrind's JIT as the prefix of a special instruction. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3  ; rotldi 0,0,13\n\t"          \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

/* Same protocol as ppc32: default in r3, args pointer in r4,
   "or 1,1,1" performs the request, result back in r3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({         unsigned long long int  _zzq_args[6];                \
             unsigned long long int  _zzq_result;                 \
             unsigned long long int* _zzq_ptr;                    \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

/* Two magic sequences: "or 2,2,2" fetches guest_NRADDR, then
   "or 4,4,4" fetches guest_NRADDR_GPR2 (the R2/TOC value needed to
   call the original function from a wrapper). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long long int __addr;                                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* Asm text fragment only: branch-and-link through r11, never
   redirected by the JIT. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

/* "or 5,5,5" marker after the preamble (IR injection).
   NOTE(review): no clobber list, as in the ppc32 port. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64_linux */
588 
589 /* ------------------------- arm-linux ------------------------- */
590 
#if defined(PLAT_arm_linux)

/* Context captured by VALGRIND_GET_NR_CONTEXT: just the guest NRADDR. */
typedef
   struct { 
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotating r12 by 3+13+29+19 = 64 bits is a net no-op on a real CPU
   (64 mod 32 == 0), but the JIT recognises this exact sequence as
   the prefix of a Valgrind special instruction. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3  ; mov r12, r12, ror #13 \n\t"  \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

/* Default goes in r3 and args pointer in r4 before the preamble;
   "orr r10,r10,r10" encodes "client request"; result comes back
   in r3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int  _zzq_args[6];                          \
    volatile unsigned int  _zzq_result;                           \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
  })

/* "orr r11,r11,r11" after the preamble fetches guest_NRADDR into r3. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = guest_NRADDR */                      \
                     "orr r11, r11, r11\n\t"                      \
                     "mov %0, r3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm text fragment only: branch-and-link through r4, never
   redirected by the JIT. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

/* "orr r9,r9,r9" marker after the preamble (IR injection). */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr r9, r9, r9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm_linux */
656 
657 /* ------------------------ s390x-linux ------------------------ */
658 
#if defined(PLAT_s390x_linux)

/* Context captured by VALGRIND_GET_NR_CONTEXT: just the guest NRADDR. */
typedef
  struct {
     unsigned long long int nraddr; /* where's the code? */
  }
  OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code. This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 * Each "lr x,x" copies a register onto itself -- a no-op on real
 * hardware.
 */
#define __SPECIAL_INSTRUCTION_PREAMBLE                           \
                     "lr 15,15\n\t"                              \
                     "lr 1,1\n\t"                                \
                     "lr 2,2\n\t"                                \
                     "lr 3,3\n\t"

/* One-instruction opcodes appended after the preamble to select the
   operation the JIT should perform. */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

/* Args pointer goes in r2 and default in r3; the request result is
   copied back out of r3.  (On s390 the "d" constraint selects a
   general-purpose register.) */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                         \
       _zzq_default, _zzq_request,                               \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                  \
 ({volatile unsigned long long int _zzq_args[6];                 \
   volatile unsigned long long int _zzq_result;                  \
   _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
   _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
   _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
   _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
   _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
   _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
   __asm__ volatile(/* r2 = args */                              \
                    "lgr 2,%1\n\t"                               \
                    /* r3 = default */                           \
                    "lgr 3,%2\n\t"                               \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CLIENT_REQUEST_CODE                        \
                    /* results = r3 */                           \
                    "lgr %0, 3\n\t"                              \
                    : "=d" (_zzq_result)                         \
                    : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                    : "cc", "2", "3", "memory"                   \
                   );                                            \
   _zzq_result;                                                  \
 })

/* Fetch guest_NRADDR: the __GET_NR_CONTEXT_CODE marker leaves the
   address in r3, which is copied to the output. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                      \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
   volatile unsigned long long int __addr;                       \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                    __GET_NR_CONTEXT_CODE                        \
                    "lgr %0, 3\n\t"                              \
                    : "=a" (__addr)                              \
                    :                                            \
                    : "cc", "3", "memory"                        \
                   );                                            \
   _zzq_orig->nraddr = __addr;                                   \
 }

/* Asm text fragment only: a call through r1 that the JIT will not
   redirect. */
#define VALGRIND_CALL_NOREDIR_R1                                 \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CALL_NO_REDIR_CODE

/* IR-injection marker. */
#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     __VEX_INJECT_IR_CODE);                      \
 } while (0)

#endif /* PLAT_s390x_linux */
733 
734 /* ------------------------- mips32-linux ---------------- */
735 
#if defined(PLAT_mips32_linux)

/* Context captured by VALGRIND_GET_NR_CONTEXT: just the guest NRADDR. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* .word 0x342
 * .word 0x742
 * .word 0xC2
 * .word 0x4C2*/
/* Each "srl $0,$0,k" shifts into $zero (hard-wired to 0), so the
   sequence is a no-op on real hardware; the JIT recognises the
   exact encodings above as the special-instruction prefix. */
#define __SPECIAL_INSTRUCTION_PREAMBLE          \
                     "srl $0, $0, 13\n\t"       \
                     "srl $0, $0, 29\n\t"       \
                     "srl $0, $0, 3\n\t"        \
                     "srl $0, $0, 19\n\t"

/* Default goes in $11 and args pointer in $12; "or $13,$13,$13"
   performs the request; the result comes back in $11.
   NOTE(review): the clobber list omits "memory"/"cc" unlike the
   other ports -- confirm against upstream valgrind.h. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
        __asm__ volatile("move $11, %1\n\t" /*default*/           \
                     "move $12, %2\n\t" /*ptr*/                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* T3 = client_request ( T4 ) */             \
                     "or $13, $13, $13\n\t"                       \
                     "move %0, $11\n\t"     /*result*/            \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "$11", "$12");                             \
    _zzq_result;                                                  \
  })

/* "or $14,$14,$14" after the preamble fetches guest_NRADDR into $11. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %t9 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11"                                      \
                     );                                           \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm text fragment only: a call through $t9 that the JIT will not
   redirect. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%t9 */                      \
                     "or $15, $15, $15\n\t"

/* "or $11,$11,$11" marker after the preamble (IR injection). */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or $11, $11, $11\n\t"                       \
                    );                                            \
 } while (0)


#endif /* PLAT_mips32_linux */
806 
807 /* ------------------------- mips64-linux ---------------- */
808 
#if defined(PLAT_mips64_linux)

/* Context captured by VALGRIND_GET_NR_CONTEXT: just the guest NRADDR. */
typedef
   struct {
      unsigned long long nraddr; /* where's the code? */
   }
   OrigFn;

/* dsll $0,$0, 3
 * dsll $0,$0, 13
 * dsll $0,$0, 29
 * dsll $0,$0, 19*/
/* Each "dsll $0,$0,k" shifts into $zero (hard-wired to 0), so the
   sequence is a no-op on real hardware; the JIT recognises the
   exact encodings as the special-instruction prefix. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                              \
                     "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"            \
                     "dsll $0,$0,29 ; dsll $0,$0,19\n\t"

/* Default goes in $11 and args pointer in $12; "or $13,$13,$13"
   performs the request; the result comes back in $11.
   NOTE(review): the clobber list omits "memory"/"cc" unlike the
   other ports -- confirm against upstream valgrind.h. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                            \
       _zzq_default, _zzq_request,                                  \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)       \
  __extension__                                                     \
  ({ volatile unsigned long long int _zzq_args[6];                  \
    volatile unsigned long long int _zzq_result;                    \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);          \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);             \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);             \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);             \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);             \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);             \
        __asm__ volatile("move $11, %1\n\t" /*default*/             \
                         "move $12, %2\n\t" /*ptr*/                 \
                         __SPECIAL_INSTRUCTION_PREAMBLE             \
                         /* $11 = client_request ( $12 ) */         \
                         "or $13, $13, $13\n\t"                     \
                         "move %0, $11\n\t"     /*result*/          \
                         : "=r" (_zzq_result)                       \
                         : "r" (_zzq_default), "r" (&_zzq_args[0])  \
                         : "$11", "$12");                           \
    _zzq_result;                                                    \
  })

/* "or $14,$14,$14" after the preamble fetches guest_NRADDR into $11. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                         \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                     \
    volatile unsigned long long int __addr;                         \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                     /* $11 = guest_NRADDR */                       \
                     "or $14, $14, $14\n\t"                         \
                     "move %0, $11"     /*result*/                  \
                     : "=r" (__addr)                                \
                     :                                              \
                     : "$11");                                      \
    _zzq_orig->nraddr = __addr;                                     \
  }

/* Asm text fragment only: a call through $25 ($t9) that the JIT
   will not redirect. */
#define VALGRIND_CALL_NOREDIR_T9                                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE                 \
                     /* call-noredir $25 */                         \
                     "or $15, $15, $15\n\t"

/* "or $11,$11,$11" marker after the preamble (IR injection). */
#define VALGRIND_VEX_INJECT_IR()                                    \
 do {                                                               \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                     "or $11, $11, $11\n\t"                         \
                    );                                              \
 } while (0)

#endif /* PLAT_mips64_linux */
875 
876 /* Insert assembly code for other platforms here... */
877 
878 #endif /* NVALGRIND */
879 
880 
881 /* ------------------------------------------------------------------ */
882 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
883 /* ugly. It's the least-worst tradeoff I can think of. */
884 /* ------------------------------------------------------------------ */
885 
886 /* This section defines magic (a.k.a appalling-hack) macros for doing
887  guaranteed-no-redirection macros, so as to get from function
888  wrappers to the functions they are wrapping. The whole point is to
889  construct standard call sequences, but to do the call itself with a
890  special no-redirect call pseudo-instruction that the JIT
891  understands and handles specially. This section is long and
892  repetitious, and I can't see a way to make it shorter.
893 
894  The naming scheme is as follows:
895 
896  CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
897 
898  'W' stands for "word" and 'v' for "void". Hence there are
899  different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
900  and for each, the possibility of returning a word-typed result, or
901  no result.
902 */
903 
904 /* Use these to write the name of your wrapper. NOTE: duplicates
905  VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
906  the default behaviour equivalance class tag "0000" into the name.
907  See pub_tool_redir.h for details -- normally you don't need to
908  think about this, though. */
909 
910 /* Use an extra level of macroisation so as to ensure the soname/fnname
911  args are fully macro-expanded before pasting them together. */
912 #define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
913 
914 #define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
915  VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)
916 
917 #define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
918  VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
919 
920 /* Use this macro from within a wrapper function to collect the
921  context (address and possibly other info) of the original function.
922  Once you have that you can then use it in one of the CALL_FN_
923  macros. The type of the argument _lval is OrigFn. */
/* Fill an OrigFn (_lval) with the address/context of the function
   being wrapped, via the NR-context client request.  Inside a
   replacement (not wrapper) function this yields zero -- see below. */
924 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
925 
926 /* Also provide end-user facilities for function replacement, rather
927  than wrapping. A replacement function differs from a wrapper in
928  that it has no way to get hold of the original function being
929  called, and hence no way to call onwards to it. In a replacement
930  function, VALGRIND_GET_ORIG_FN always returns zero. */
931 
/* Build the replacement symbol name _vgr00000ZU_<soname>_<fnname>.
   Same scheme as I_WRAP_SONAME_FNNAME_ZU but with the "vgr"
   (replace) prefix instead of "vgw" (wrap). */
932 #define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname) \
933  VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)
934 
/* Z-encoded-soname variant of I_REPLACE_SONAME_FNNAME_ZU; produces
   _vgr00000ZZ_<soname>_<fnname>. */
935 #define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname) \
936  VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
937 
938 /* Derivatives of the main macros below, for calling functions
939  returning void. */
940 
/* 0-arg, void-returning call: delegate to CALL_FN_W_v and discard
   the word result in _junk. */
941 #define CALL_FN_v_v(fnptr) \
942  do { volatile unsigned long _junk; \
943  CALL_FN_W_v(_junk,fnptr); } while (0)
944 
/* 1-arg, void-returning call: delegate to CALL_FN_W_W, discard result. */
945 #define CALL_FN_v_W(fnptr, arg1) \
946  do { volatile unsigned long _junk; \
947  CALL_FN_W_W(_junk,fnptr,arg1); } while (0)
948 
/* 2-arg, void-returning call: delegate to CALL_FN_W_WW, discard result. */
949 #define CALL_FN_v_WW(fnptr, arg1,arg2) \
950  do { volatile unsigned long _junk; \
951  CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)
952 
/* 3-arg, void-returning call: delegate to CALL_FN_W_WWW, discard result. */
953 #define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
954  do { volatile unsigned long _junk; \
955  CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
956 
/* 4-arg, void-returning call: delegate to CALL_FN_W_WWWW, discard result. */
957 #define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
958  do { volatile unsigned long _junk; \
959  CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
960 
/* 5-arg, void-returning call: delegate to CALL_FN_W_5W, discard result. */
961 #define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
962  do { volatile unsigned long _junk; \
963  CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
964 
/* 6-arg, void-returning call: delegate to CALL_FN_W_6W, discard result. */
965 #define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
966  do { volatile unsigned long _junk; \
967  CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
968 
/* 7-arg, void-returning call: delegate to CALL_FN_W_7W, discard result. */
969 #define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
970  do { volatile unsigned long _junk; \
971  CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
972 
973 /* ------------------------- x86-{linux,darwin} ---------------- */
974 
975 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
976 
977 /* These regs are trashed by the hidden call. No need to mention eax
978  as gcc can already see that, plus causes gcc to bomb. */
/* x86 caller-saved registers listed in the asm clobber lists below.
   %eax is deliberately omitted: it is already an output operand, and
   listing it as well makes gcc reject the asm (see comment above). */
979 #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
980 
981 /* Macros to save and align the stack before making a function
982  call and restore it afterwards as gcc may not keep the stack
983  pointer aligned if it doesn't realise calls are being made
984  to other functions. */
985 
/* Save %esp in %edi, then round %esp down to a 16-byte boundary.
   %edi therefore must be in the clobber list of every asm that uses
   this, and VALGRIND_RESTORE_STACK undoes it. */
986 #define VALGRIND_ALIGN_STACK \
987  "movl %%esp,%%edi\n\t" \
988  "andl $0xfffffff0,%%esp\n\t"
/* Restore the pre-alignment %esp saved by VALGRIND_ALIGN_STACK. */
989 #define VALGRIND_RESTORE_STACK \
990  "movl %%edi,%%esp\n\t"
991 
992 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
993  long) == 4. */
994 
/* Call the no-redirect target (_argvec[0] = _orig.nraddr) with zero
   arguments.  %eax carries &_argvec[0] in, and the word result out
   into lval.  No pushes needed; VALGRIND_ALIGN_STACK alone keeps the
   stack 16-aligned at the call. */
995 #define CALL_FN_W_v(lval, orig) \
996  do { \
997  volatile OrigFn _orig = (orig); \
998  volatile unsigned long _argvec[1]; \
999  volatile unsigned long _res; \
1000  _argvec[0] = (unsigned long)_orig.nraddr; \
1001  __asm__ volatile( \
1002  VALGRIND_ALIGN_STACK \
1003  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1004  VALGRIND_CALL_NOREDIR_EAX \
1005  VALGRIND_RESTORE_STACK \
1006  : /*out*/ "=a" (_res) \
1007  : /*in*/ "a" (&_argvec[0]) \
1008  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1009  ); \
1010  lval = (__typeof__(lval)) _res; \
1011  } while (0)
1012 
/* 1-arg call: arg pushed from _argvec[1]; "subl $12" pads so that
   12 + 1*4 = 16 bytes keep the call-time %esp 16-aligned.  Result
   (word) returned in %eax -> lval. */
1013 #define CALL_FN_W_W(lval, orig, arg1) \
1014  do { \
1015  volatile OrigFn _orig = (orig); \
1016  volatile unsigned long _argvec[2]; \
1017  volatile unsigned long _res; \
1018  _argvec[0] = (unsigned long)_orig.nraddr; \
1019  _argvec[1] = (unsigned long)(arg1); \
1020  __asm__ volatile( \
1021  VALGRIND_ALIGN_STACK \
1022  "subl $12, %%esp\n\t" \
1023  "pushl 4(%%eax)\n\t" \
1024  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1025  VALGRIND_CALL_NOREDIR_EAX \
1026  VALGRIND_RESTORE_STACK \
1027  : /*out*/ "=a" (_res) \
1028  : /*in*/ "a" (&_argvec[0]) \
1029  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1030  ); \
1031  lval = (__typeof__(lval)) _res; \
1032  } while (0)
1033 
/* 2-arg call: args pushed right-to-left from _argvec[2..1];
   "subl $8" pads so 8 + 2*4 = 16 keeps %esp 16-aligned. */
1034 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1035  do { \
1036  volatile OrigFn _orig = (orig); \
1037  volatile unsigned long _argvec[3]; \
1038  volatile unsigned long _res; \
1039  _argvec[0] = (unsigned long)_orig.nraddr; \
1040  _argvec[1] = (unsigned long)(arg1); \
1041  _argvec[2] = (unsigned long)(arg2); \
1042  __asm__ volatile( \
1043  VALGRIND_ALIGN_STACK \
1044  "subl $8, %%esp\n\t" \
1045  "pushl 8(%%eax)\n\t" \
1046  "pushl 4(%%eax)\n\t" \
1047  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1048  VALGRIND_CALL_NOREDIR_EAX \
1049  VALGRIND_RESTORE_STACK \
1050  : /*out*/ "=a" (_res) \
1051  : /*in*/ "a" (&_argvec[0]) \
1052  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1053  ); \
1054  lval = (__typeof__(lval)) _res; \
1055  } while (0)
1056 
/* 3-arg call: args pushed right-to-left; "subl $4" pads so
   4 + 3*4 = 16 keeps %esp 16-aligned. */
1057 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1058  do { \
1059  volatile OrigFn _orig = (orig); \
1060  volatile unsigned long _argvec[4]; \
1061  volatile unsigned long _res; \
1062  _argvec[0] = (unsigned long)_orig.nraddr; \
1063  _argvec[1] = (unsigned long)(arg1); \
1064  _argvec[2] = (unsigned long)(arg2); \
1065  _argvec[3] = (unsigned long)(arg3); \
1066  __asm__ volatile( \
1067  VALGRIND_ALIGN_STACK \
1068  "subl $4, %%esp\n\t" \
1069  "pushl 12(%%eax)\n\t" \
1070  "pushl 8(%%eax)\n\t" \
1071  "pushl 4(%%eax)\n\t" \
1072  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1073  VALGRIND_CALL_NOREDIR_EAX \
1074  VALGRIND_RESTORE_STACK \
1075  : /*out*/ "=a" (_res) \
1076  : /*in*/ "a" (&_argvec[0]) \
1077  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1078  ); \
1079  lval = (__typeof__(lval)) _res; \
1080  } while (0)
1081 
/* 4-arg call: 4*4 = 16 bytes of pushes is already a multiple of 16,
   so no padding subl is needed. */
1082 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1083  do { \
1084  volatile OrigFn _orig = (orig); \
1085  volatile unsigned long _argvec[5]; \
1086  volatile unsigned long _res; \
1087  _argvec[0] = (unsigned long)_orig.nraddr; \
1088  _argvec[1] = (unsigned long)(arg1); \
1089  _argvec[2] = (unsigned long)(arg2); \
1090  _argvec[3] = (unsigned long)(arg3); \
1091  _argvec[4] = (unsigned long)(arg4); \
1092  __asm__ volatile( \
1093  VALGRIND_ALIGN_STACK \
1094  "pushl 16(%%eax)\n\t" \
1095  "pushl 12(%%eax)\n\t" \
1096  "pushl 8(%%eax)\n\t" \
1097  "pushl 4(%%eax)\n\t" \
1098  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1099  VALGRIND_CALL_NOREDIR_EAX \
1100  VALGRIND_RESTORE_STACK \
1101  : /*out*/ "=a" (_res) \
1102  : /*in*/ "a" (&_argvec[0]) \
1103  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1104  ); \
1105  lval = (__typeof__(lval)) _res; \
1106  } while (0)
1107 
/* 5-arg call: "subl $12" pads so 12 + 5*4 = 32 keeps %esp 16-aligned. */
1108 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1109  do { \
1110  volatile OrigFn _orig = (orig); \
1111  volatile unsigned long _argvec[6]; \
1112  volatile unsigned long _res; \
1113  _argvec[0] = (unsigned long)_orig.nraddr; \
1114  _argvec[1] = (unsigned long)(arg1); \
1115  _argvec[2] = (unsigned long)(arg2); \
1116  _argvec[3] = (unsigned long)(arg3); \
1117  _argvec[4] = (unsigned long)(arg4); \
1118  _argvec[5] = (unsigned long)(arg5); \
1119  __asm__ volatile( \
1120  VALGRIND_ALIGN_STACK \
1121  "subl $12, %%esp\n\t" \
1122  "pushl 20(%%eax)\n\t" \
1123  "pushl 16(%%eax)\n\t" \
1124  "pushl 12(%%eax)\n\t" \
1125  "pushl 8(%%eax)\n\t" \
1126  "pushl 4(%%eax)\n\t" \
1127  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1128  VALGRIND_CALL_NOREDIR_EAX \
1129  VALGRIND_RESTORE_STACK \
1130  : /*out*/ "=a" (_res) \
1131  : /*in*/ "a" (&_argvec[0]) \
1132  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1133  ); \
1134  lval = (__typeof__(lval)) _res; \
1135  } while (0)
1136 
/* 6-arg call: "subl $8" pads so 8 + 6*4 = 32 keeps %esp 16-aligned. */
1137 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1138  do { \
1139  volatile OrigFn _orig = (orig); \
1140  volatile unsigned long _argvec[7]; \
1141  volatile unsigned long _res; \
1142  _argvec[0] = (unsigned long)_orig.nraddr; \
1143  _argvec[1] = (unsigned long)(arg1); \
1144  _argvec[2] = (unsigned long)(arg2); \
1145  _argvec[3] = (unsigned long)(arg3); \
1146  _argvec[4] = (unsigned long)(arg4); \
1147  _argvec[5] = (unsigned long)(arg5); \
1148  _argvec[6] = (unsigned long)(arg6); \
1149  __asm__ volatile( \
1150  VALGRIND_ALIGN_STACK \
1151  "subl $8, %%esp\n\t" \
1152  "pushl 24(%%eax)\n\t" \
1153  "pushl 20(%%eax)\n\t" \
1154  "pushl 16(%%eax)\n\t" \
1155  "pushl 12(%%eax)\n\t" \
1156  "pushl 8(%%eax)\n\t" \
1157  "pushl 4(%%eax)\n\t" \
1158  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1159  VALGRIND_CALL_NOREDIR_EAX \
1160  VALGRIND_RESTORE_STACK \
1161  : /*out*/ "=a" (_res) \
1162  : /*in*/ "a" (&_argvec[0]) \
1163  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1164  ); \
1165  lval = (__typeof__(lval)) _res; \
1166  } while (0)
1167 
/* 7-arg call: "subl $4" pads so 4 + 7*4 = 32 keeps %esp 16-aligned. */
1168 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1169  arg7) \
1170  do { \
1171  volatile OrigFn _orig = (orig); \
1172  volatile unsigned long _argvec[8]; \
1173  volatile unsigned long _res; \
1174  _argvec[0] = (unsigned long)_orig.nraddr; \
1175  _argvec[1] = (unsigned long)(arg1); \
1176  _argvec[2] = (unsigned long)(arg2); \
1177  _argvec[3] = (unsigned long)(arg3); \
1178  _argvec[4] = (unsigned long)(arg4); \
1179  _argvec[5] = (unsigned long)(arg5); \
1180  _argvec[6] = (unsigned long)(arg6); \
1181  _argvec[7] = (unsigned long)(arg7); \
1182  __asm__ volatile( \
1183  VALGRIND_ALIGN_STACK \
1184  "subl $4, %%esp\n\t" \
1185  "pushl 28(%%eax)\n\t" \
1186  "pushl 24(%%eax)\n\t" \
1187  "pushl 20(%%eax)\n\t" \
1188  "pushl 16(%%eax)\n\t" \
1189  "pushl 12(%%eax)\n\t" \
1190  "pushl 8(%%eax)\n\t" \
1191  "pushl 4(%%eax)\n\t" \
1192  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1193  VALGRIND_CALL_NOREDIR_EAX \
1194  VALGRIND_RESTORE_STACK \
1195  : /*out*/ "=a" (_res) \
1196  : /*in*/ "a" (&_argvec[0]) \
1197  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1198  ); \
1199  lval = (__typeof__(lval)) _res; \
1200  } while (0)
1201 
/* 8-arg call: 8*4 = 32 bytes of pushes is a multiple of 16, so no
   padding subl is needed. */
1202 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1203  arg7,arg8) \
1204  do { \
1205  volatile OrigFn _orig = (orig); \
1206  volatile unsigned long _argvec[9]; \
1207  volatile unsigned long _res; \
1208  _argvec[0] = (unsigned long)_orig.nraddr; \
1209  _argvec[1] = (unsigned long)(arg1); \
1210  _argvec[2] = (unsigned long)(arg2); \
1211  _argvec[3] = (unsigned long)(arg3); \
1212  _argvec[4] = (unsigned long)(arg4); \
1213  _argvec[5] = (unsigned long)(arg5); \
1214  _argvec[6] = (unsigned long)(arg6); \
1215  _argvec[7] = (unsigned long)(arg7); \
1216  _argvec[8] = (unsigned long)(arg8); \
1217  __asm__ volatile( \
1218  VALGRIND_ALIGN_STACK \
1219  "pushl 32(%%eax)\n\t" \
1220  "pushl 28(%%eax)\n\t" \
1221  "pushl 24(%%eax)\n\t" \
1222  "pushl 20(%%eax)\n\t" \
1223  "pushl 16(%%eax)\n\t" \
1224  "pushl 12(%%eax)\n\t" \
1225  "pushl 8(%%eax)\n\t" \
1226  "pushl 4(%%eax)\n\t" \
1227  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1228  VALGRIND_CALL_NOREDIR_EAX \
1229  VALGRIND_RESTORE_STACK \
1230  : /*out*/ "=a" (_res) \
1231  : /*in*/ "a" (&_argvec[0]) \
1232  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1233  ); \
1234  lval = (__typeof__(lval)) _res; \
1235  } while (0)
1236 
/* 9-arg call: "subl $12" pads so 12 + 9*4 = 48 keeps %esp 16-aligned. */
1237 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1238  arg7,arg8,arg9) \
1239  do { \
1240  volatile OrigFn _orig = (orig); \
1241  volatile unsigned long _argvec[10]; \
1242  volatile unsigned long _res; \
1243  _argvec[0] = (unsigned long)_orig.nraddr; \
1244  _argvec[1] = (unsigned long)(arg1); \
1245  _argvec[2] = (unsigned long)(arg2); \
1246  _argvec[3] = (unsigned long)(arg3); \
1247  _argvec[4] = (unsigned long)(arg4); \
1248  _argvec[5] = (unsigned long)(arg5); \
1249  _argvec[6] = (unsigned long)(arg6); \
1250  _argvec[7] = (unsigned long)(arg7); \
1251  _argvec[8] = (unsigned long)(arg8); \
1252  _argvec[9] = (unsigned long)(arg9); \
1253  __asm__ volatile( \
1254  VALGRIND_ALIGN_STACK \
1255  "subl $12, %%esp\n\t" \
1256  "pushl 36(%%eax)\n\t" \
1257  "pushl 32(%%eax)\n\t" \
1258  "pushl 28(%%eax)\n\t" \
1259  "pushl 24(%%eax)\n\t" \
1260  "pushl 20(%%eax)\n\t" \
1261  "pushl 16(%%eax)\n\t" \
1262  "pushl 12(%%eax)\n\t" \
1263  "pushl 8(%%eax)\n\t" \
1264  "pushl 4(%%eax)\n\t" \
1265  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1266  VALGRIND_CALL_NOREDIR_EAX \
1267  VALGRIND_RESTORE_STACK \
1268  : /*out*/ "=a" (_res) \
1269  : /*in*/ "a" (&_argvec[0]) \
1270  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1271  ); \
1272  lval = (__typeof__(lval)) _res; \
1273  } while (0)
1274 
/* 10-arg call: "subl $8" pads so 8 + 10*4 = 48 keeps %esp 16-aligned. */
1275 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1276  arg7,arg8,arg9,arg10) \
1277  do { \
1278  volatile OrigFn _orig = (orig); \
1279  volatile unsigned long _argvec[11]; \
1280  volatile unsigned long _res; \
1281  _argvec[0] = (unsigned long)_orig.nraddr; \
1282  _argvec[1] = (unsigned long)(arg1); \
1283  _argvec[2] = (unsigned long)(arg2); \
1284  _argvec[3] = (unsigned long)(arg3); \
1285  _argvec[4] = (unsigned long)(arg4); \
1286  _argvec[5] = (unsigned long)(arg5); \
1287  _argvec[6] = (unsigned long)(arg6); \
1288  _argvec[7] = (unsigned long)(arg7); \
1289  _argvec[8] = (unsigned long)(arg8); \
1290  _argvec[9] = (unsigned long)(arg9); \
1291  _argvec[10] = (unsigned long)(arg10); \
1292  __asm__ volatile( \
1293  VALGRIND_ALIGN_STACK \
1294  "subl $8, %%esp\n\t" \
1295  "pushl 40(%%eax)\n\t" \
1296  "pushl 36(%%eax)\n\t" \
1297  "pushl 32(%%eax)\n\t" \
1298  "pushl 28(%%eax)\n\t" \
1299  "pushl 24(%%eax)\n\t" \
1300  "pushl 20(%%eax)\n\t" \
1301  "pushl 16(%%eax)\n\t" \
1302  "pushl 12(%%eax)\n\t" \
1303  "pushl 8(%%eax)\n\t" \
1304  "pushl 4(%%eax)\n\t" \
1305  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1306  VALGRIND_CALL_NOREDIR_EAX \
1307  VALGRIND_RESTORE_STACK \
1308  : /*out*/ "=a" (_res) \
1309  : /*in*/ "a" (&_argvec[0]) \
1310  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1311  ); \
1312  lval = (__typeof__(lval)) _res; \
1313  } while (0)
1314 
/* 11-arg call: "subl $4" pads so 4 + 11*4 = 48 keeps %esp 16-aligned. */
1315 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1316  arg6,arg7,arg8,arg9,arg10, \
1317  arg11) \
1318  do { \
1319  volatile OrigFn _orig = (orig); \
1320  volatile unsigned long _argvec[12]; \
1321  volatile unsigned long _res; \
1322  _argvec[0] = (unsigned long)_orig.nraddr; \
1323  _argvec[1] = (unsigned long)(arg1); \
1324  _argvec[2] = (unsigned long)(arg2); \
1325  _argvec[3] = (unsigned long)(arg3); \
1326  _argvec[4] = (unsigned long)(arg4); \
1327  _argvec[5] = (unsigned long)(arg5); \
1328  _argvec[6] = (unsigned long)(arg6); \
1329  _argvec[7] = (unsigned long)(arg7); \
1330  _argvec[8] = (unsigned long)(arg8); \
1331  _argvec[9] = (unsigned long)(arg9); \
1332  _argvec[10] = (unsigned long)(arg10); \
1333  _argvec[11] = (unsigned long)(arg11); \
1334  __asm__ volatile( \
1335  VALGRIND_ALIGN_STACK \
1336  "subl $4, %%esp\n\t" \
1337  "pushl 44(%%eax)\n\t" \
1338  "pushl 40(%%eax)\n\t" \
1339  "pushl 36(%%eax)\n\t" \
1340  "pushl 32(%%eax)\n\t" \
1341  "pushl 28(%%eax)\n\t" \
1342  "pushl 24(%%eax)\n\t" \
1343  "pushl 20(%%eax)\n\t" \
1344  "pushl 16(%%eax)\n\t" \
1345  "pushl 12(%%eax)\n\t" \
1346  "pushl 8(%%eax)\n\t" \
1347  "pushl 4(%%eax)\n\t" \
1348  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1349  VALGRIND_CALL_NOREDIR_EAX \
1350  VALGRIND_RESTORE_STACK \
1351  : /*out*/ "=a" (_res) \
1352  : /*in*/ "a" (&_argvec[0]) \
1353  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1354  ); \
1355  lval = (__typeof__(lval)) _res; \
1356  } while (0)
1357 
/* 12-arg call: 12*4 = 48 bytes of pushes is a multiple of 16, so no
   padding subl is needed. */
1358 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1359  arg6,arg7,arg8,arg9,arg10, \
1360  arg11,arg12) \
1361  do { \
1362  volatile OrigFn _orig = (orig); \
1363  volatile unsigned long _argvec[13]; \
1364  volatile unsigned long _res; \
1365  _argvec[0] = (unsigned long)_orig.nraddr; \
1366  _argvec[1] = (unsigned long)(arg1); \
1367  _argvec[2] = (unsigned long)(arg2); \
1368  _argvec[3] = (unsigned long)(arg3); \
1369  _argvec[4] = (unsigned long)(arg4); \
1370  _argvec[5] = (unsigned long)(arg5); \
1371  _argvec[6] = (unsigned long)(arg6); \
1372  _argvec[7] = (unsigned long)(arg7); \
1373  _argvec[8] = (unsigned long)(arg8); \
1374  _argvec[9] = (unsigned long)(arg9); \
1375  _argvec[10] = (unsigned long)(arg10); \
1376  _argvec[11] = (unsigned long)(arg11); \
1377  _argvec[12] = (unsigned long)(arg12); \
1378  __asm__ volatile( \
1379  VALGRIND_ALIGN_STACK \
1380  "pushl 48(%%eax)\n\t" \
1381  "pushl 44(%%eax)\n\t" \
1382  "pushl 40(%%eax)\n\t" \
1383  "pushl 36(%%eax)\n\t" \
1384  "pushl 32(%%eax)\n\t" \
1385  "pushl 28(%%eax)\n\t" \
1386  "pushl 24(%%eax)\n\t" \
1387  "pushl 20(%%eax)\n\t" \
1388  "pushl 16(%%eax)\n\t" \
1389  "pushl 12(%%eax)\n\t" \
1390  "pushl 8(%%eax)\n\t" \
1391  "pushl 4(%%eax)\n\t" \
1392  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1393  VALGRIND_CALL_NOREDIR_EAX \
1394  VALGRIND_RESTORE_STACK \
1395  : /*out*/ "=a" (_res) \
1396  : /*in*/ "a" (&_argvec[0]) \
1397  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1398  ); \
1399  lval = (__typeof__(lval)) _res; \
1400  } while (0)
1401 
1402 #endif /* PLAT_x86_linux || PLAT_x86_darwin */
1403 
1404 /* ------------------------ amd64-{linux,darwin} --------------- */
1405 
1406 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
1407 
1408 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1409 
1410 /* These regs are trashed by the hidden call. */
/* amd64 caller-saved registers for the asm clobber lists below.
   %rax is commented out because it is already the output operand
   (same reasoning as the x86 version above). */
1411 #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1412  "rdi", "r8", "r9", "r10", "r11"
1413 
1414 /* This is all pretty complex. It's so as to make stack unwinding
1415  work reliably. See bug 243270. The basic problem is the sub and
1416  add of 128 of %rsp in all of the following macros. If gcc believes
1417  the CFA is in %rsp, then unwinding may fail, because what's at the
1418  CFA is not what gcc "expected" when it constructs the CFIs for the
1419  places where the macros are instantiated.
1420 
1421  But we can't just add a CFI annotation to increase the CFA offset
1422  by 128, to match the sub of 128 from %rsp, because we don't know
1423  whether gcc has chosen %rsp as the CFA at that point, or whether it
1424  has chosen some other register (eg, %rbp). In the latter case,
1425  adding a CFI annotation to change the CFA offset is simply wrong.
1426 
1427  So the solution is to get hold of the CFA using
1428  __builtin_dwarf_cfa(), put it in a known register, and add a
1429  CFI annotation to say what the register is. We choose %rbp for
1430  this (perhaps perversely), because:
1431 
1432  (1) %rbp is already subject to unwinding. If a new register was
1433  chosen then the unwinder would have to unwind it in all stack
1434  traces, which is expensive, and
1435 
1436  (2) %rbp is already subject to precise exception updates in the
1437  JIT. If a new register was chosen, we'd have to have precise
1438  exceptions for it too, which reduces performance of the
1439  generated code.
1440 
1441  However .. one extra complication. We can't just whack the result
1442  of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1443  list of trashed registers at the end of the inline assembly
1444  fragments; gcc won't allow %rbp to appear in that list. Hence
1445  instead we need to stash %rbp in %r15 for the duration of the asm,
1446  and say that %r15 is trashed instead. gcc seems happy to go with
1447  that.
1448 
1449  Oh .. and this all needs to be conditionalised so that it is
1450  unchanged from before this commit, when compiled with older gccs
1451  that don't support __builtin_dwarf_cfa. Furthermore, since
1452  this header file is freestanding, it has to be independent of
1453  config.h, and so the following conditionalisation cannot depend on
1454  configure time checks.
1455 
1456  Although it's not clear from
1457  'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1458  this expression excludes Darwin.
1459  .cfi directives in Darwin assembly appear to be completely
1460  different and I haven't investigated how they work.
1461 
1462  For even more entertainment value, note we have to use the
1463  completely undocumented __builtin_dwarf_cfa(), which appears to
1464  really compute the CFA, whereas __builtin_frame_address(0) claims
1465  to but actually doesn't. See
1466  https://bugs.kde.org/show_bug.cgi?id=243270#c47
1467 */
/* CFI support for reliable unwinding across the hidden call -- see
   the long explanation above.  When the compiler emits DWARF CFI
   (__GCC_HAVE_DWARF2_CFI_ASM, which excludes Darwin):
     __FRAME_POINTER       adds the CFA (__builtin_dwarf_cfa()) as an
                           extra asm input, referenced as %2 below;
     VALGRIND_CFI_PROLOGUE stashes %rbp in %r15, puts the CFA in %rbp
                           and tells the unwinder the CFA is rbp+0;
     VALGRIND_CFI_EPILOGUE restores %rbp and the saved CFI state.
   Otherwise all three expand to nothing. */
1468 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
1469 # define __FRAME_POINTER \
1470  ,"r"(__builtin_dwarf_cfa())
1471 # define VALGRIND_CFI_PROLOGUE \
1472  "movq %%rbp, %%r15\n\t" \
1473  "movq %2, %%rbp\n\t" \
1474  ".cfi_remember_state\n\t" \
1475  ".cfi_def_cfa rbp, 0\n\t"
1476 # define VALGRIND_CFI_EPILOGUE \
1477  "movq %%r15, %%rbp\n\t" \
1478  ".cfi_restore_state\n\t"
1479 #else
1480 # define __FRAME_POINTER
1481 # define VALGRIND_CFI_PROLOGUE
1482 # define VALGRIND_CFI_EPILOGUE
1483 #endif
1484 
1485 /* Macros to save and align the stack before making a function
1486  call and restore it afterwards as gcc may not keep the stack
1487  pointer aligned if it doesn't realise calls are being made
1488  to other functions. */
1489 
/* Save %rsp in %r14, then round %rsp down to a 16-byte boundary.
   %r14 therefore must be in the clobber list of every asm that uses
   this, and VALGRIND_RESTORE_STACK undoes it. */
1490 #define VALGRIND_ALIGN_STACK \
1491  "movq %%rsp,%%r14\n\t" \
1492  "andq $0xfffffffffffffff0,%%rsp\n\t"
/* Restore the pre-alignment %rsp saved by VALGRIND_ALIGN_STACK. */
1493 #define VALGRIND_RESTORE_STACK \
1494  "movq %%r14,%%rsp\n\t"
1495 
1496 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1497  long) == 8. */
1498 
1499 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1500  macros. In order not to trash the stack redzone, we need to drop
1501  %rsp by 128 before the hidden call, and restore afterwards. The
1502  nastiness is that it is only by luck that the stack still appears
1503  to be unwindable during the hidden call - since then the behaviour
1504  of any routine using this macro does not match what the CFI data
1505  says. Sigh.
1506 
1507  Why is this important? Imagine that a wrapper has a stack
1508  allocated local, and passes to the hidden call, a pointer to it.
1509  Because gcc does not know about the hidden call, it may allocate
1510  that local in the redzone. Unfortunately the hidden call may then
1511  trash it before it comes to use it. So we must step clear of the
1512  redzone, for the duration of the hidden call, to make it safe.
1513 
1514  Probably the same problem afflicts the other redzone-style ABIs too
1515  (ppc64-linux); but for those, the stack is
1516  self describing (none of this CFI nonsense) so at least messing
1517  with the stack pointer doesn't give a danger of non-unwindable
1518  stack. */
1519 
/* amd64 0-arg call: "subq $128" steps over the ABI red zone for the
   duration of the hidden call (see note above); target address loaded
   from _argvec[0], word result in %rax -> lval. */
1520 #define CALL_FN_W_v(lval, orig) \
1521  do { \
1522  volatile OrigFn _orig = (orig); \
1523  volatile unsigned long _argvec[1]; \
1524  volatile unsigned long _res; \
1525  _argvec[0] = (unsigned long)_orig.nraddr; \
1526  __asm__ volatile( \
1527  VALGRIND_CFI_PROLOGUE \
1528  VALGRIND_ALIGN_STACK \
1529  "subq $128,%%rsp\n\t" \
1530  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1531  VALGRIND_CALL_NOREDIR_RAX \
1532  VALGRIND_RESTORE_STACK \
1533  VALGRIND_CFI_EPILOGUE \
1534  : /*out*/ "=a" (_res) \
1535  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1536  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1537  ); \
1538  lval = (__typeof__(lval)) _res; \
1539  } while (0)
1540 
/* amd64 1-arg call: arg1 in %rdi per the ARGREGS comment above;
   "subq $128" skips the red zone. */
1541 #define CALL_FN_W_W(lval, orig, arg1) \
1542  do { \
1543  volatile OrigFn _orig = (orig); \
1544  volatile unsigned long _argvec[2]; \
1545  volatile unsigned long _res; \
1546  _argvec[0] = (unsigned long)_orig.nraddr; \
1547  _argvec[1] = (unsigned long)(arg1); \
1548  __asm__ volatile( \
1549  VALGRIND_CFI_PROLOGUE \
1550  VALGRIND_ALIGN_STACK \
1551  "subq $128,%%rsp\n\t" \
1552  "movq 8(%%rax), %%rdi\n\t" \
1553  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1554  VALGRIND_CALL_NOREDIR_RAX \
1555  VALGRIND_RESTORE_STACK \
1556  VALGRIND_CFI_EPILOGUE \
1557  : /*out*/ "=a" (_res) \
1558  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1559  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1560  ); \
1561  lval = (__typeof__(lval)) _res; \
1562  } while (0)
1563 
/* amd64 2-arg call: args in %rdi,%rsi; "subq $128" skips the red zone. */
1564 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1565  do { \
1566  volatile OrigFn _orig = (orig); \
1567  volatile unsigned long _argvec[3]; \
1568  volatile unsigned long _res; \
1569  _argvec[0] = (unsigned long)_orig.nraddr; \
1570  _argvec[1] = (unsigned long)(arg1); \
1571  _argvec[2] = (unsigned long)(arg2); \
1572  __asm__ volatile( \
1573  VALGRIND_CFI_PROLOGUE \
1574  VALGRIND_ALIGN_STACK \
1575  "subq $128,%%rsp\n\t" \
1576  "movq 16(%%rax), %%rsi\n\t" \
1577  "movq 8(%%rax), %%rdi\n\t" \
1578  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1579  VALGRIND_CALL_NOREDIR_RAX \
1580  VALGRIND_RESTORE_STACK \
1581  VALGRIND_CFI_EPILOGUE \
1582  : /*out*/ "=a" (_res) \
1583  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1584  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1585  ); \
1586  lval = (__typeof__(lval)) _res; \
1587  } while (0)
1588 
/* amd64 3-arg call: args in %rdi,%rsi,%rdx; "subq $128" skips the
   red zone. */
1589 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1590  do { \
1591  volatile OrigFn _orig = (orig); \
1592  volatile unsigned long _argvec[4]; \
1593  volatile unsigned long _res; \
1594  _argvec[0] = (unsigned long)_orig.nraddr; \
1595  _argvec[1] = (unsigned long)(arg1); \
1596  _argvec[2] = (unsigned long)(arg2); \
1597  _argvec[3] = (unsigned long)(arg3); \
1598  __asm__ volatile( \
1599  VALGRIND_CFI_PROLOGUE \
1600  VALGRIND_ALIGN_STACK \
1601  "subq $128,%%rsp\n\t" \
1602  "movq 24(%%rax), %%rdx\n\t" \
1603  "movq 16(%%rax), %%rsi\n\t" \
1604  "movq 8(%%rax), %%rdi\n\t" \
1605  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1606  VALGRIND_CALL_NOREDIR_RAX \
1607  VALGRIND_RESTORE_STACK \
1608  VALGRIND_CFI_EPILOGUE \
1609  : /*out*/ "=a" (_res) \
1610  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1611  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1612  ); \
1613  lval = (__typeof__(lval)) _res; \
1614  } while (0)
1615 
/* amd64 4-arg call: args in %rdi,%rsi,%rdx,%rcx; "subq $128" skips
   the red zone. */
1616 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1617  do { \
1618  volatile OrigFn _orig = (orig); \
1619  volatile unsigned long _argvec[5]; \
1620  volatile unsigned long _res; \
1621  _argvec[0] = (unsigned long)_orig.nraddr; \
1622  _argvec[1] = (unsigned long)(arg1); \
1623  _argvec[2] = (unsigned long)(arg2); \
1624  _argvec[3] = (unsigned long)(arg3); \
1625  _argvec[4] = (unsigned long)(arg4); \
1626  __asm__ volatile( \
1627  VALGRIND_CFI_PROLOGUE \
1628  VALGRIND_ALIGN_STACK \
1629  "subq $128,%%rsp\n\t" \
1630  "movq 32(%%rax), %%rcx\n\t" \
1631  "movq 24(%%rax), %%rdx\n\t" \
1632  "movq 16(%%rax), %%rsi\n\t" \
1633  "movq 8(%%rax), %%rdi\n\t" \
1634  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1635  VALGRIND_CALL_NOREDIR_RAX \
1636  VALGRIND_RESTORE_STACK \
1637  VALGRIND_CFI_EPILOGUE \
1638  : /*out*/ "=a" (_res) \
1639  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1640  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1641  ); \
1642  lval = (__typeof__(lval)) _res; \
1643  } while (0)
1644 
/* amd64 5-arg call: args in %rdi,%rsi,%rdx,%rcx,%r8; "subq $128"
   skips the red zone. */
1645 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1646  do { \
1647  volatile OrigFn _orig = (orig); \
1648  volatile unsigned long _argvec[6]; \
1649  volatile unsigned long _res; \
1650  _argvec[0] = (unsigned long)_orig.nraddr; \
1651  _argvec[1] = (unsigned long)(arg1); \
1652  _argvec[2] = (unsigned long)(arg2); \
1653  _argvec[3] = (unsigned long)(arg3); \
1654  _argvec[4] = (unsigned long)(arg4); \
1655  _argvec[5] = (unsigned long)(arg5); \
1656  __asm__ volatile( \
1657  VALGRIND_CFI_PROLOGUE \
1658  VALGRIND_ALIGN_STACK \
1659  "subq $128,%%rsp\n\t" \
1660  "movq 40(%%rax), %%r8\n\t" \
1661  "movq 32(%%rax), %%rcx\n\t" \
1662  "movq 24(%%rax), %%rdx\n\t" \
1663  "movq 16(%%rax), %%rsi\n\t" \
1664  "movq 8(%%rax), %%rdi\n\t" \
1665  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1666  VALGRIND_CALL_NOREDIR_RAX \
1667  VALGRIND_RESTORE_STACK \
1668  VALGRIND_CFI_EPILOGUE \
1669  : /*out*/ "=a" (_res) \
1670  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1671  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1672  ); \
1673  lval = (__typeof__(lval)) _res; \
1674  } while (0)
1675 
/* amd64 6-arg call: all six args fit in the argument registers
   %rdi,%rsi,%rdx,%rcx,%r8,%r9; "subq $128" skips the red zone. */
1676 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1677  do { \
1678  volatile OrigFn _orig = (orig); \
1679  volatile unsigned long _argvec[7]; \
1680  volatile unsigned long _res; \
1681  _argvec[0] = (unsigned long)_orig.nraddr; \
1682  _argvec[1] = (unsigned long)(arg1); \
1683  _argvec[2] = (unsigned long)(arg2); \
1684  _argvec[3] = (unsigned long)(arg3); \
1685  _argvec[4] = (unsigned long)(arg4); \
1686  _argvec[5] = (unsigned long)(arg5); \
1687  _argvec[6] = (unsigned long)(arg6); \
1688  __asm__ volatile( \
1689  VALGRIND_CFI_PROLOGUE \
1690  VALGRIND_ALIGN_STACK \
1691  "subq $128,%%rsp\n\t" \
1692  "movq 48(%%rax), %%r9\n\t" \
1693  "movq 40(%%rax), %%r8\n\t" \
1694  "movq 32(%%rax), %%rcx\n\t" \
1695  "movq 24(%%rax), %%rdx\n\t" \
1696  "movq 16(%%rax), %%rsi\n\t" \
1697  "movq 8(%%rax), %%rdi\n\t" \
1698  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1699  VALGRIND_CALL_NOREDIR_RAX \
1700  VALGRIND_RESTORE_STACK \
1701  VALGRIND_CFI_EPILOGUE \
1702  : /*out*/ "=a" (_res) \
1703  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1704  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1705  ); \
1706  lval = (__typeof__(lval)) _res; \
1707  } while (0)
1708 
/* amd64 7-arg call: six register args plus arg7 pushed on the stack.
   "subq $136" = 128 (red zone) + 8 of padding, so that after the
   single 8-byte push the call-time %rsp stays 16-aligned. */
1709 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1710  arg7) \
1711  do { \
1712  volatile OrigFn _orig = (orig); \
1713  volatile unsigned long _argvec[8]; \
1714  volatile unsigned long _res; \
1715  _argvec[0] = (unsigned long)_orig.nraddr; \
1716  _argvec[1] = (unsigned long)(arg1); \
1717  _argvec[2] = (unsigned long)(arg2); \
1718  _argvec[3] = (unsigned long)(arg3); \
1719  _argvec[4] = (unsigned long)(arg4); \
1720  _argvec[5] = (unsigned long)(arg5); \
1721  _argvec[6] = (unsigned long)(arg6); \
1722  _argvec[7] = (unsigned long)(arg7); \
1723  __asm__ volatile( \
1724  VALGRIND_CFI_PROLOGUE \
1725  VALGRIND_ALIGN_STACK \
1726  "subq $136,%%rsp\n\t" \
1727  "pushq 56(%%rax)\n\t" \
1728  "movq 48(%%rax), %%r9\n\t" \
1729  "movq 40(%%rax), %%r8\n\t" \
1730  "movq 32(%%rax), %%rcx\n\t" \
1731  "movq 24(%%rax), %%rdx\n\t" \
1732  "movq 16(%%rax), %%rsi\n\t" \
1733  "movq 8(%%rax), %%rdi\n\t" \
1734  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1735  VALGRIND_CALL_NOREDIR_RAX \
1736  VALGRIND_RESTORE_STACK \
1737  VALGRIND_CFI_EPILOGUE \
1738  : /*out*/ "=a" (_res) \
1739  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1740  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1741  ); \
1742  lval = (__typeof__(lval)) _res; \
1743  } while (0)
1744 
/* amd64 8-arg call: six register args plus arg8,arg7 pushed R-to-L.
   "subq $128" (red zone) needs no extra pad: two 8-byte pushes keep
   the call-time %rsp 16-aligned. */
1745 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1746  arg7,arg8) \
1747  do { \
1748  volatile OrigFn _orig = (orig); \
1749  volatile unsigned long _argvec[9]; \
1750  volatile unsigned long _res; \
1751  _argvec[0] = (unsigned long)_orig.nraddr; \
1752  _argvec[1] = (unsigned long)(arg1); \
1753  _argvec[2] = (unsigned long)(arg2); \
1754  _argvec[3] = (unsigned long)(arg3); \
1755  _argvec[4] = (unsigned long)(arg4); \
1756  _argvec[5] = (unsigned long)(arg5); \
1757  _argvec[6] = (unsigned long)(arg6); \
1758  _argvec[7] = (unsigned long)(arg7); \
1759  _argvec[8] = (unsigned long)(arg8); \
1760  __asm__ volatile( \
1761  VALGRIND_CFI_PROLOGUE \
1762  VALGRIND_ALIGN_STACK \
1763  "subq $128,%%rsp\n\t" \
1764  "pushq 64(%%rax)\n\t" \
1765  "pushq 56(%%rax)\n\t" \
1766  "movq 48(%%rax), %%r9\n\t" \
1767  "movq 40(%%rax), %%r8\n\t" \
1768  "movq 32(%%rax), %%rcx\n\t" \
1769  "movq 24(%%rax), %%rdx\n\t" \
1770  "movq 16(%%rax), %%rsi\n\t" \
1771  "movq 8(%%rax), %%rdi\n\t" \
1772  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1773  VALGRIND_CALL_NOREDIR_RAX \
1774  VALGRIND_RESTORE_STACK \
1775  VALGRIND_CFI_EPILOGUE \
1776  : /*out*/ "=a" (_res) \
1777  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1778  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1779  ); \
1780  lval = (__typeof__(lval)) _res; \
1781  } while (0)
1782 
/* amd64: as CALL_FN_W_8W but arg9 is also pushed on the stack.
   subq $136 plus three 8-byte pushes = 160, keeping %rsp 16-byte
   aligned at the call. */
1783 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1784  arg7,arg8,arg9) \
1785  do { \
1786  volatile OrigFn _orig = (orig); \
1787  volatile unsigned long _argvec[10]; \
1788  volatile unsigned long _res; \
1789  _argvec[0] = (unsigned long)_orig.nraddr; \
1790  _argvec[1] = (unsigned long)(arg1); \
1791  _argvec[2] = (unsigned long)(arg2); \
1792  _argvec[3] = (unsigned long)(arg3); \
1793  _argvec[4] = (unsigned long)(arg4); \
1794  _argvec[5] = (unsigned long)(arg5); \
1795  _argvec[6] = (unsigned long)(arg6); \
1796  _argvec[7] = (unsigned long)(arg7); \
1797  _argvec[8] = (unsigned long)(arg8); \
1798  _argvec[9] = (unsigned long)(arg9); \
1799  __asm__ volatile( \
1800  VALGRIND_CFI_PROLOGUE \
1801  VALGRIND_ALIGN_STACK \
1802  "subq $136,%%rsp\n\t" \
1803  "pushq 72(%%rax)\n\t" \
1804  "pushq 64(%%rax)\n\t" \
1805  "pushq 56(%%rax)\n\t" \
1806  "movq 48(%%rax), %%r9\n\t" \
1807  "movq 40(%%rax), %%r8\n\t" \
1808  "movq 32(%%rax), %%rcx\n\t" \
1809  "movq 24(%%rax), %%rdx\n\t" \
1810  "movq 16(%%rax), %%rsi\n\t" \
1811  "movq 8(%%rax), %%rdi\n\t" \
1812  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1813  VALGRIND_CALL_NOREDIR_RAX \
1814  VALGRIND_RESTORE_STACK \
1815  VALGRIND_CFI_EPILOGUE \
1816  : /*out*/ "=a" (_res) \
1817  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1818  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1819  ); \
1820  lval = (__typeof__(lval)) _res; \
1821  } while (0)
1822 
/* amd64: as CALL_FN_W_8W but args 9-10 also go on the stack.
   subq $128 plus four 8-byte pushes = 160, keeping %rsp 16-byte
   aligned at the call. */
1823 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1824  arg7,arg8,arg9,arg10) \
1825  do { \
1826  volatile OrigFn _orig = (orig); \
1827  volatile unsigned long _argvec[11]; \
1828  volatile unsigned long _res; \
1829  _argvec[0] = (unsigned long)_orig.nraddr; \
1830  _argvec[1] = (unsigned long)(arg1); \
1831  _argvec[2] = (unsigned long)(arg2); \
1832  _argvec[3] = (unsigned long)(arg3); \
1833  _argvec[4] = (unsigned long)(arg4); \
1834  _argvec[5] = (unsigned long)(arg5); \
1835  _argvec[6] = (unsigned long)(arg6); \
1836  _argvec[7] = (unsigned long)(arg7); \
1837  _argvec[8] = (unsigned long)(arg8); \
1838  _argvec[9] = (unsigned long)(arg9); \
1839  _argvec[10] = (unsigned long)(arg10); \
1840  __asm__ volatile( \
1841  VALGRIND_CFI_PROLOGUE \
1842  VALGRIND_ALIGN_STACK \
1843  "subq $128,%%rsp\n\t" \
1844  "pushq 80(%%rax)\n\t" \
1845  "pushq 72(%%rax)\n\t" \
1846  "pushq 64(%%rax)\n\t" \
1847  "pushq 56(%%rax)\n\t" \
1848  "movq 48(%%rax), %%r9\n\t" \
1849  "movq 40(%%rax), %%r8\n\t" \
1850  "movq 32(%%rax), %%rcx\n\t" \
1851  "movq 24(%%rax), %%rdx\n\t" \
1852  "movq 16(%%rax), %%rsi\n\t" \
1853  "movq 8(%%rax), %%rdi\n\t" \
1854  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1855  VALGRIND_CALL_NOREDIR_RAX \
1856  VALGRIND_RESTORE_STACK \
1857  VALGRIND_CFI_EPILOGUE \
1858  : /*out*/ "=a" (_res) \
1859  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1860  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1861  ); \
1862  lval = (__typeof__(lval)) _res; \
1863  } while (0)
1864 
/* amd64: as CALL_FN_W_8W but args 9-11 also go on the stack.
   subq $136 plus five 8-byte pushes = 176, keeping %rsp 16-byte
   aligned at the call. */
1865 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1866  arg7,arg8,arg9,arg10,arg11) \
1867  do { \
1868  volatile OrigFn _orig = (orig); \
1869  volatile unsigned long _argvec[12]; \
1870  volatile unsigned long _res; \
1871  _argvec[0] = (unsigned long)_orig.nraddr; \
1872  _argvec[1] = (unsigned long)(arg1); \
1873  _argvec[2] = (unsigned long)(arg2); \
1874  _argvec[3] = (unsigned long)(arg3); \
1875  _argvec[4] = (unsigned long)(arg4); \
1876  _argvec[5] = (unsigned long)(arg5); \
1877  _argvec[6] = (unsigned long)(arg6); \
1878  _argvec[7] = (unsigned long)(arg7); \
1879  _argvec[8] = (unsigned long)(arg8); \
1880  _argvec[9] = (unsigned long)(arg9); \
1881  _argvec[10] = (unsigned long)(arg10); \
1882  _argvec[11] = (unsigned long)(arg11); \
1883  __asm__ volatile( \
1884  VALGRIND_CFI_PROLOGUE \
1885  VALGRIND_ALIGN_STACK \
1886  "subq $136,%%rsp\n\t" \
1887  "pushq 88(%%rax)\n\t" \
1888  "pushq 80(%%rax)\n\t" \
1889  "pushq 72(%%rax)\n\t" \
1890  "pushq 64(%%rax)\n\t" \
1891  "pushq 56(%%rax)\n\t" \
1892  "movq 48(%%rax), %%r9\n\t" \
1893  "movq 40(%%rax), %%r8\n\t" \
1894  "movq 32(%%rax), %%rcx\n\t" \
1895  "movq 24(%%rax), %%rdx\n\t" \
1896  "movq 16(%%rax), %%rsi\n\t" \
1897  "movq 8(%%rax), %%rdi\n\t" \
1898  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1899  VALGRIND_CALL_NOREDIR_RAX \
1900  VALGRIND_RESTORE_STACK \
1901  VALGRIND_CFI_EPILOGUE \
1902  : /*out*/ "=a" (_res) \
1903  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1904  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1905  ); \
1906  lval = (__typeof__(lval)) _res; \
1907  } while (0)
1908 
/* amd64: as CALL_FN_W_8W but args 9-12 also go on the stack.
   subq $128 plus six 8-byte pushes = 176, keeping %rsp 16-byte
   aligned at the call. */
1909 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1910  arg7,arg8,arg9,arg10,arg11,arg12) \
1911  do { \
1912  volatile OrigFn _orig = (orig); \
1913  volatile unsigned long _argvec[13]; \
1914  volatile unsigned long _res; \
1915  _argvec[0] = (unsigned long)_orig.nraddr; \
1916  _argvec[1] = (unsigned long)(arg1); \
1917  _argvec[2] = (unsigned long)(arg2); \
1918  _argvec[3] = (unsigned long)(arg3); \
1919  _argvec[4] = (unsigned long)(arg4); \
1920  _argvec[5] = (unsigned long)(arg5); \
1921  _argvec[6] = (unsigned long)(arg6); \
1922  _argvec[7] = (unsigned long)(arg7); \
1923  _argvec[8] = (unsigned long)(arg8); \
1924  _argvec[9] = (unsigned long)(arg9); \
1925  _argvec[10] = (unsigned long)(arg10); \
1926  _argvec[11] = (unsigned long)(arg11); \
1927  _argvec[12] = (unsigned long)(arg12); \
1928  __asm__ volatile( \
1929  VALGRIND_CFI_PROLOGUE \
1930  VALGRIND_ALIGN_STACK \
1931  "subq $128,%%rsp\n\t" \
1932  "pushq 96(%%rax)\n\t" \
1933  "pushq 88(%%rax)\n\t" \
1934  "pushq 80(%%rax)\n\t" \
1935  "pushq 72(%%rax)\n\t" \
1936  "pushq 64(%%rax)\n\t" \
1937  "pushq 56(%%rax)\n\t" \
1938  "movq 48(%%rax), %%r9\n\t" \
1939  "movq 40(%%rax), %%r8\n\t" \
1940  "movq 32(%%rax), %%rcx\n\t" \
1941  "movq 24(%%rax), %%rdx\n\t" \
1942  "movq 16(%%rax), %%rsi\n\t" \
1943  "movq 8(%%rax), %%rdi\n\t" \
1944  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1945  VALGRIND_CALL_NOREDIR_RAX \
1946  VALGRIND_RESTORE_STACK \
1947  VALGRIND_CFI_EPILOGUE \
1948  : /*out*/ "=a" (_res) \
1949  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1950  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1951  ); \
1952  lval = (__typeof__(lval)) _res; \
1953  } while (0)
1954 
1955 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
1956 
1957 /* ------------------------ ppc32-linux ------------------------ */
1958 
1959 #if defined(PLAT_ppc32_linux)
1960 
1961 /* This is useful for finding out about the on-stack stuff:
1962 
1963  extern int f9 ( int,int,int,int,int,int,int,int,int );
1964  extern int f10 ( int,int,int,int,int,int,int,int,int,int );
1965  extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
1966  extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
1967 
1968  int g9 ( void ) {
1969  return f9(11,22,33,44,55,66,77,88,99);
1970  }
1971  int g10 ( void ) {
1972  return f10(11,22,33,44,55,66,77,88,99,110);
1973  }
1974  int g11 ( void ) {
1975  return f11(11,22,33,44,55,66,77,88,99,110,121);
1976  }
1977  int g12 ( void ) {
1978  return f12(11,22,33,44,55,66,77,88,99,110,121,132);
1979  }
1980 */
1981 
1982 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
1983 
1984 /* These regs are trashed by the hidden call. */
/* ppc32: registers clobbered by the hidden call (volatile regs of the
   SVR4 PPC32 ABI plus lr/ctr/xer and all cr fields). */
1985 #define __CALLER_SAVED_REGS \
1986  "lr", "ctr", "xer", \
1987  "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
1988  "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
1989  "r11", "r12", "r13"
1990 
1991 /* Macros to save and align the stack before making a function
1992  call and restore it afterwards as gcc may not keep the stack
1993  pointer aligned if it doesn't realise calls are being made
1994  to other functions. */
1995 
/* Save r1 (stack pointer) in r28, then clear its low 4 bits
   (rlwinm mask keeps bits 0..27), aligning r1 down to 16 bytes. */
1996 #define VALGRIND_ALIGN_STACK \
1997  "mr 28,1\n\t" \
1998  "rlwinm 1,1,0,0,27\n\t"
/* Restore the saved stack pointer from r28. */
1999 #define VALGRIND_RESTORE_STACK \
2000  "mr 1,28\n\t"
2001 
2002 /* These CALL_FN_ macros assume that on ppc32-linux,
2003  sizeof(unsigned long) == 4. */
2004 
/* ppc32: call a zero-argument function via the no-redirect path.
   r11 <- &_argvec, then the target address is loaded from _argvec[0]
   into r11; the result comes back in r3.  r28 is clobbered by the
   stack align/restore helpers. */
2005 #define CALL_FN_W_v(lval, orig) \
2006  do { \
2007  volatile OrigFn _orig = (orig); \
2008  volatile unsigned long _argvec[1]; \
2009  volatile unsigned long _res; \
2010  _argvec[0] = (unsigned long)_orig.nraddr; \
2011  __asm__ volatile( \
2012  VALGRIND_ALIGN_STACK \
2013  "mr 11,%1\n\t" \
2014  "lwz 11,0(11)\n\t" /* target->r11 */ \
2015  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2016  VALGRIND_RESTORE_STACK \
2017  "mr %0,3" \
2018  : /*out*/ "=r" (_res) \
2019  : /*in*/ "r" (&_argvec[0]) \
2020  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2021  ); \
2022  lval = (__typeof__(lval)) _res; \
2023  } while (0)
2024 
/* ppc32: one-argument call; arg1 is loaded into r3 from _argvec[1]
   (4-byte slots) before the target address overwrites r11. */
2025 #define CALL_FN_W_W(lval, orig, arg1) \
2026  do { \
2027  volatile OrigFn _orig = (orig); \
2028  volatile unsigned long _argvec[2]; \
2029  volatile unsigned long _res; \
2030  _argvec[0] = (unsigned long)_orig.nraddr; \
2031  _argvec[1] = (unsigned long)arg1; \
2032  __asm__ volatile( \
2033  VALGRIND_ALIGN_STACK \
2034  "mr 11,%1\n\t" \
2035  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2036  "lwz 11,0(11)\n\t" /* target->r11 */ \
2037  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2038  VALGRIND_RESTORE_STACK \
2039  "mr %0,3" \
2040  : /*out*/ "=r" (_res) \
2041  : /*in*/ "r" (&_argvec[0]) \
2042  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2043  ); \
2044  lval = (__typeof__(lval)) _res; \
2045  } while (0)
2046 
/* ppc32: two-argument call; args go in r3,r4. */
2047 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2048  do { \
2049  volatile OrigFn _orig = (orig); \
2050  volatile unsigned long _argvec[3]; \
2051  volatile unsigned long _res; \
2052  _argvec[0] = (unsigned long)_orig.nraddr; \
2053  _argvec[1] = (unsigned long)arg1; \
2054  _argvec[2] = (unsigned long)arg2; \
2055  __asm__ volatile( \
2056  VALGRIND_ALIGN_STACK \
2057  "mr 11,%1\n\t" \
2058  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2059  "lwz 4,8(11)\n\t" \
2060  "lwz 11,0(11)\n\t" /* target->r11 */ \
2061  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2062  VALGRIND_RESTORE_STACK \
2063  "mr %0,3" \
2064  : /*out*/ "=r" (_res) \
2065  : /*in*/ "r" (&_argvec[0]) \
2066  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2067  ); \
2068  lval = (__typeof__(lval)) _res; \
2069  } while (0)
2070 
/* ppc32: three-argument call; args go in r3..r5. */
2071 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2072  do { \
2073  volatile OrigFn _orig = (orig); \
2074  volatile unsigned long _argvec[4]; \
2075  volatile unsigned long _res; \
2076  _argvec[0] = (unsigned long)_orig.nraddr; \
2077  _argvec[1] = (unsigned long)arg1; \
2078  _argvec[2] = (unsigned long)arg2; \
2079  _argvec[3] = (unsigned long)arg3; \
2080  __asm__ volatile( \
2081  VALGRIND_ALIGN_STACK \
2082  "mr 11,%1\n\t" \
2083  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2084  "lwz 4,8(11)\n\t" \
2085  "lwz 5,12(11)\n\t" \
2086  "lwz 11,0(11)\n\t" /* target->r11 */ \
2087  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2088  VALGRIND_RESTORE_STACK \
2089  "mr %0,3" \
2090  : /*out*/ "=r" (_res) \
2091  : /*in*/ "r" (&_argvec[0]) \
2092  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2093  ); \
2094  lval = (__typeof__(lval)) _res; \
2095  } while (0)
2096 
/* ppc32: four-argument call; args go in r3..r6. */
2097 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2098  do { \
2099  volatile OrigFn _orig = (orig); \
2100  volatile unsigned long _argvec[5]; \
2101  volatile unsigned long _res; \
2102  _argvec[0] = (unsigned long)_orig.nraddr; \
2103  _argvec[1] = (unsigned long)arg1; \
2104  _argvec[2] = (unsigned long)arg2; \
2105  _argvec[3] = (unsigned long)arg3; \
2106  _argvec[4] = (unsigned long)arg4; \
2107  __asm__ volatile( \
2108  VALGRIND_ALIGN_STACK \
2109  "mr 11,%1\n\t" \
2110  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2111  "lwz 4,8(11)\n\t" \
2112  "lwz 5,12(11)\n\t" \
2113  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2114  "lwz 11,0(11)\n\t" /* target->r11 */ \
2115  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2116  VALGRIND_RESTORE_STACK \
2117  "mr %0,3" \
2118  : /*out*/ "=r" (_res) \
2119  : /*in*/ "r" (&_argvec[0]) \
2120  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2121  ); \
2122  lval = (__typeof__(lval)) _res; \
2123  } while (0)
2124 
/* ppc32: five-argument call; args go in r3..r7. */
2125 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2126  do { \
2127  volatile OrigFn _orig = (orig); \
2128  volatile unsigned long _argvec[6]; \
2129  volatile unsigned long _res; \
2130  _argvec[0] = (unsigned long)_orig.nraddr; \
2131  _argvec[1] = (unsigned long)arg1; \
2132  _argvec[2] = (unsigned long)arg2; \
2133  _argvec[3] = (unsigned long)arg3; \
2134  _argvec[4] = (unsigned long)arg4; \
2135  _argvec[5] = (unsigned long)arg5; \
2136  __asm__ volatile( \
2137  VALGRIND_ALIGN_STACK \
2138  "mr 11,%1\n\t" \
2139  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2140  "lwz 4,8(11)\n\t" \
2141  "lwz 5,12(11)\n\t" \
2142  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2143  "lwz 7,20(11)\n\t" \
2144  "lwz 11,0(11)\n\t" /* target->r11 */ \
2145  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2146  VALGRIND_RESTORE_STACK \
2147  "mr %0,3" \
2148  : /*out*/ "=r" (_res) \
2149  : /*in*/ "r" (&_argvec[0]) \
2150  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2151  ); \
2152  lval = (__typeof__(lval)) _res; \
2153  } while (0)
2154 
/* ppc32: six-argument call; args go in r3..r8. */
2155 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2156  do { \
2157  volatile OrigFn _orig = (orig); \
2158  volatile unsigned long _argvec[7]; \
2159  volatile unsigned long _res; \
2160  _argvec[0] = (unsigned long)_orig.nraddr; \
2161  _argvec[1] = (unsigned long)arg1; \
2162  _argvec[2] = (unsigned long)arg2; \
2163  _argvec[3] = (unsigned long)arg3; \
2164  _argvec[4] = (unsigned long)arg4; \
2165  _argvec[5] = (unsigned long)arg5; \
2166  _argvec[6] = (unsigned long)arg6; \
2167  __asm__ volatile( \
2168  VALGRIND_ALIGN_STACK \
2169  "mr 11,%1\n\t" \
2170  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2171  "lwz 4,8(11)\n\t" \
2172  "lwz 5,12(11)\n\t" \
2173  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2174  "lwz 7,20(11)\n\t" \
2175  "lwz 8,24(11)\n\t" \
2176  "lwz 11,0(11)\n\t" /* target->r11 */ \
2177  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2178  VALGRIND_RESTORE_STACK \
2179  "mr %0,3" \
2180  : /*out*/ "=r" (_res) \
2181  : /*in*/ "r" (&_argvec[0]) \
2182  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2183  ); \
2184  lval = (__typeof__(lval)) _res; \
2185  } while (0)
2186 
/* ppc32: seven-argument call; args go in r3..r9. */
2187 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2188  arg7) \
2189  do { \
2190  volatile OrigFn _orig = (orig); \
2191  volatile unsigned long _argvec[8]; \
2192  volatile unsigned long _res; \
2193  _argvec[0] = (unsigned long)_orig.nraddr; \
2194  _argvec[1] = (unsigned long)arg1; \
2195  _argvec[2] = (unsigned long)arg2; \
2196  _argvec[3] = (unsigned long)arg3; \
2197  _argvec[4] = (unsigned long)arg4; \
2198  _argvec[5] = (unsigned long)arg5; \
2199  _argvec[6] = (unsigned long)arg6; \
2200  _argvec[7] = (unsigned long)arg7; \
2201  __asm__ volatile( \
2202  VALGRIND_ALIGN_STACK \
2203  "mr 11,%1\n\t" \
2204  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2205  "lwz 4,8(11)\n\t" \
2206  "lwz 5,12(11)\n\t" \
2207  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2208  "lwz 7,20(11)\n\t" \
2209  "lwz 8,24(11)\n\t" \
2210  "lwz 9,28(11)\n\t" \
2211  "lwz 11,0(11)\n\t" /* target->r11 */ \
2212  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2213  VALGRIND_RESTORE_STACK \
2214  "mr %0,3" \
2215  : /*out*/ "=r" (_res) \
2216  : /*in*/ "r" (&_argvec[0]) \
2217  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2218  ); \
2219  lval = (__typeof__(lval)) _res; \
2220  } while (0)
2221 
/* ppc32: eight-argument call; args fill all argument registers
   r3..r10 -- the last all-register case. */
2222 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2223  arg7,arg8) \
2224  do { \
2225  volatile OrigFn _orig = (orig); \
2226  volatile unsigned long _argvec[9]; \
2227  volatile unsigned long _res; \
2228  _argvec[0] = (unsigned long)_orig.nraddr; \
2229  _argvec[1] = (unsigned long)arg1; \
2230  _argvec[2] = (unsigned long)arg2; \
2231  _argvec[3] = (unsigned long)arg3; \
2232  _argvec[4] = (unsigned long)arg4; \
2233  _argvec[5] = (unsigned long)arg5; \
2234  _argvec[6] = (unsigned long)arg6; \
2235  _argvec[7] = (unsigned long)arg7; \
2236  _argvec[8] = (unsigned long)arg8; \
2237  __asm__ volatile( \
2238  VALGRIND_ALIGN_STACK \
2239  "mr 11,%1\n\t" \
2240  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2241  "lwz 4,8(11)\n\t" \
2242  "lwz 5,12(11)\n\t" \
2243  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2244  "lwz 7,20(11)\n\t" \
2245  "lwz 8,24(11)\n\t" \
2246  "lwz 9,28(11)\n\t" \
2247  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2248  "lwz 11,0(11)\n\t" /* target->r11 */ \
2249  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2250  VALGRIND_RESTORE_STACK \
2251  "mr %0,3" \
2252  : /*out*/ "=r" (_res) \
2253  : /*in*/ "r" (&_argvec[0]) \
2254  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2255  ); \
2256  lval = (__typeof__(lval)) _res; \
2257  } while (0)
2258 
/* ppc32: nine-argument call; args 1-8 in r3..r10, arg9 stored to the
   stack at 8(r1) after making 16 bytes of room ("addi 1,1,-16").
   r3 is used as a scratch register for the store before it is
   finally loaded with arg1. */
2259 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2260  arg7,arg8,arg9) \
2261  do { \
2262  volatile OrigFn _orig = (orig); \
2263  volatile unsigned long _argvec[10]; \
2264  volatile unsigned long _res; \
2265  _argvec[0] = (unsigned long)_orig.nraddr; \
2266  _argvec[1] = (unsigned long)arg1; \
2267  _argvec[2] = (unsigned long)arg2; \
2268  _argvec[3] = (unsigned long)arg3; \
2269  _argvec[4] = (unsigned long)arg4; \
2270  _argvec[5] = (unsigned long)arg5; \
2271  _argvec[6] = (unsigned long)arg6; \
2272  _argvec[7] = (unsigned long)arg7; \
2273  _argvec[8] = (unsigned long)arg8; \
2274  _argvec[9] = (unsigned long)arg9; \
2275  __asm__ volatile( \
2276  VALGRIND_ALIGN_STACK \
2277  "mr 11,%1\n\t" \
2278  "addi 1,1,-16\n\t" \
2279  /* arg9 */ \
2280  "lwz 3,36(11)\n\t" \
2281  "stw 3,8(1)\n\t" \
2282  /* args1-8 */ \
2283  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2284  "lwz 4,8(11)\n\t" \
2285  "lwz 5,12(11)\n\t" \
2286  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2287  "lwz 7,20(11)\n\t" \
2288  "lwz 8,24(11)\n\t" \
2289  "lwz 9,28(11)\n\t" \
2290  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2291  "lwz 11,0(11)\n\t" /* target->r11 */ \
2292  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2293  VALGRIND_RESTORE_STACK \
2294  "mr %0,3" \
2295  : /*out*/ "=r" (_res) \
2296  : /*in*/ "r" (&_argvec[0]) \
2297  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2298  ); \
2299  lval = (__typeof__(lval)) _res; \
2300  } while (0)
2301 
/* ppc32: ten-argument call; args 1-8 in r3..r10, args 9-10 stored to
   the stack at 8(r1)/12(1) within the same 16-byte adjustment. */
2302 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2303  arg7,arg8,arg9,arg10) \
2304  do { \
2305  volatile OrigFn _orig = (orig); \
2306  volatile unsigned long _argvec[11]; \
2307  volatile unsigned long _res; \
2308  _argvec[0] = (unsigned long)_orig.nraddr; \
2309  _argvec[1] = (unsigned long)arg1; \
2310  _argvec[2] = (unsigned long)arg2; \
2311  _argvec[3] = (unsigned long)arg3; \
2312  _argvec[4] = (unsigned long)arg4; \
2313  _argvec[5] = (unsigned long)arg5; \
2314  _argvec[6] = (unsigned long)arg6; \
2315  _argvec[7] = (unsigned long)arg7; \
2316  _argvec[8] = (unsigned long)arg8; \
2317  _argvec[9] = (unsigned long)arg9; \
2318  _argvec[10] = (unsigned long)arg10; \
2319  __asm__ volatile( \
2320  VALGRIND_ALIGN_STACK \
2321  "mr 11,%1\n\t" \
2322  "addi 1,1,-16\n\t" \
2323  /* arg10 */ \
2324  "lwz 3,40(11)\n\t" \
2325  "stw 3,12(1)\n\t" \
2326  /* arg9 */ \
2327  "lwz 3,36(11)\n\t" \
2328  "stw 3,8(1)\n\t" \
2329  /* args1-8 */ \
2330  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2331  "lwz 4,8(11)\n\t" \
2332  "lwz 5,12(11)\n\t" \
2333  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2334  "lwz 7,20(11)\n\t" \
2335  "lwz 8,24(11)\n\t" \
2336  "lwz 9,28(11)\n\t" \
2337  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2338  "lwz 11,0(11)\n\t" /* target->r11 */ \
2339  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2340  VALGRIND_RESTORE_STACK \
2341  "mr %0,3" \
2342  : /*out*/ "=r" (_res) \
2343  : /*in*/ "r" (&_argvec[0]) \
2344  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2345  ); \
2346  lval = (__typeof__(lval)) _res; \
2347  } while (0)
2348 
/* ppc32: eleven-argument call; args 1-8 in r3..r10, args 9-11 stored
   to the stack at 8/12/16(r1) after a 32-byte adjustment (keeps r1
   16-byte aligned). */
2349 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2350  arg7,arg8,arg9,arg10,arg11) \
2351  do { \
2352  volatile OrigFn _orig = (orig); \
2353  volatile unsigned long _argvec[12]; \
2354  volatile unsigned long _res; \
2355  _argvec[0] = (unsigned long)_orig.nraddr; \
2356  _argvec[1] = (unsigned long)arg1; \
2357  _argvec[2] = (unsigned long)arg2; \
2358  _argvec[3] = (unsigned long)arg3; \
2359  _argvec[4] = (unsigned long)arg4; \
2360  _argvec[5] = (unsigned long)arg5; \
2361  _argvec[6] = (unsigned long)arg6; \
2362  _argvec[7] = (unsigned long)arg7; \
2363  _argvec[8] = (unsigned long)arg8; \
2364  _argvec[9] = (unsigned long)arg9; \
2365  _argvec[10] = (unsigned long)arg10; \
2366  _argvec[11] = (unsigned long)arg11; \
2367  __asm__ volatile( \
2368  VALGRIND_ALIGN_STACK \
2369  "mr 11,%1\n\t" \
2370  "addi 1,1,-32\n\t" \
2371  /* arg11 */ \
2372  "lwz 3,44(11)\n\t" \
2373  "stw 3,16(1)\n\t" \
2374  /* arg10 */ \
2375  "lwz 3,40(11)\n\t" \
2376  "stw 3,12(1)\n\t" \
2377  /* arg9 */ \
2378  "lwz 3,36(11)\n\t" \
2379  "stw 3,8(1)\n\t" \
2380  /* args1-8 */ \
2381  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2382  "lwz 4,8(11)\n\t" \
2383  "lwz 5,12(11)\n\t" \
2384  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2385  "lwz 7,20(11)\n\t" \
2386  "lwz 8,24(11)\n\t" \
2387  "lwz 9,28(11)\n\t" \
2388  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2389  "lwz 11,0(11)\n\t" /* target->r11 */ \
2390  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2391  VALGRIND_RESTORE_STACK \
2392  "mr %0,3" \
2393  : /*out*/ "=r" (_res) \
2394  : /*in*/ "r" (&_argvec[0]) \
2395  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2396  ); \
2397  lval = (__typeof__(lval)) _res; \
2398  } while (0)
2399 
/* ppc32: twelve-argument call; args 1-8 in r3..r10, args 9-12 stored
   to the stack at 8/12/16/20(r1) within the 32-byte adjustment. */
2400 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2401  arg7,arg8,arg9,arg10,arg11,arg12) \
2402  do { \
2403  volatile OrigFn _orig = (orig); \
2404  volatile unsigned long _argvec[13]; \
2405  volatile unsigned long _res; \
2406  _argvec[0] = (unsigned long)_orig.nraddr; \
2407  _argvec[1] = (unsigned long)arg1; \
2408  _argvec[2] = (unsigned long)arg2; \
2409  _argvec[3] = (unsigned long)arg3; \
2410  _argvec[4] = (unsigned long)arg4; \
2411  _argvec[5] = (unsigned long)arg5; \
2412  _argvec[6] = (unsigned long)arg6; \
2413  _argvec[7] = (unsigned long)arg7; \
2414  _argvec[8] = (unsigned long)arg8; \
2415  _argvec[9] = (unsigned long)arg9; \
2416  _argvec[10] = (unsigned long)arg10; \
2417  _argvec[11] = (unsigned long)arg11; \
2418  _argvec[12] = (unsigned long)arg12; \
2419  __asm__ volatile( \
2420  VALGRIND_ALIGN_STACK \
2421  "mr 11,%1\n\t" \
2422  "addi 1,1,-32\n\t" \
2423  /* arg12 */ \
2424  "lwz 3,48(11)\n\t" \
2425  "stw 3,20(1)\n\t" \
2426  /* arg11 */ \
2427  "lwz 3,44(11)\n\t" \
2428  "stw 3,16(1)\n\t" \
2429  /* arg10 */ \
2430  "lwz 3,40(11)\n\t" \
2431  "stw 3,12(1)\n\t" \
2432  /* arg9 */ \
2433  "lwz 3,36(11)\n\t" \
2434  "stw 3,8(1)\n\t" \
2435  /* args1-8 */ \
2436  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2437  "lwz 4,8(11)\n\t" \
2438  "lwz 5,12(11)\n\t" \
2439  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2440  "lwz 7,20(11)\n\t" \
2441  "lwz 8,24(11)\n\t" \
2442  "lwz 9,28(11)\n\t" \
2443  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2444  "lwz 11,0(11)\n\t" /* target->r11 */ \
2445  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2446  VALGRIND_RESTORE_STACK \
2447  "mr %0,3" \
2448  : /*out*/ "=r" (_res) \
2449  : /*in*/ "r" (&_argvec[0]) \
2450  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2451  ); \
2452  lval = (__typeof__(lval)) _res; \
2453  } while (0)
2454 
2455 #endif /* PLAT_ppc32_linux */
2456 
2457 /* ------------------------ ppc64-linux ------------------------ */
2458 
2459 #if defined(PLAT_ppc64_linux)
2460 
2461 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2462 
2463 /* These regs are trashed by the hidden call. */
/* ppc64: registers clobbered by the hidden call (same volatile set
   as ppc32 plus lr/ctr/xer and all cr fields). */
2464 #define __CALLER_SAVED_REGS \
2465  "lr", "ctr", "xer", \
2466  "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2467  "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2468  "r11", "r12", "r13"
2469 
2470 /* Macros to save and align the stack before making a function
2471  call and restore it afterwards as gcc may not keep the stack
2472  pointer aligned if it doesn't realise calls are being made
2473  to other functions. */
2474 
/* Save r1 (stack pointer) in r28, then clear its low 4 bits
   (rldicr keeps bits 0..59), aligning r1 down to 16 bytes. */
2475 #define VALGRIND_ALIGN_STACK \
2476  "mr 28,1\n\t" \
2477  "rldicr 1,1,0,59\n\t"
/* Restore the saved stack pointer from r28. */
2478 #define VALGRIND_RESTORE_STACK \
2479  "mr 1,28\n\t"
2480 
2481 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2482  long) == 8. */
2483 
/* ppc64: zero-argument call.  %1 = &_argvec[2], so -16(11) is
   _argvec[0] (scratch slot for the caller's TOC pointer, r2) and
   -8(11) is _argvec[1] (the callee's TOC from _orig.r2).  The
   caller's r2 is saved before the call and restored after; the
   result comes back in r3. */
2484 #define CALL_FN_W_v(lval, orig) \
2485  do { \
2486  volatile OrigFn _orig = (orig); \
2487  volatile unsigned long _argvec[3+0]; \
2488  volatile unsigned long _res; \
2489  /* _argvec[0] holds current r2 across the call */ \
2490  _argvec[1] = (unsigned long)_orig.r2; \
2491  _argvec[2] = (unsigned long)_orig.nraddr; \
2492  __asm__ volatile( \
2493  VALGRIND_ALIGN_STACK \
2494  "mr 11,%1\n\t" \
2495  "std 2,-16(11)\n\t" /* save tocptr */ \
2496  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2497  "ld 11, 0(11)\n\t" /* target->r11 */ \
2498  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2499  "mr 11,%1\n\t" \
2500  "mr %0,3\n\t" \
2501  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2502  VALGRIND_RESTORE_STACK \
2503  : /*out*/ "=r" (_res) \
2504  : /*in*/ "r" (&_argvec[2]) \
2505  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2506  ); \
2507  lval = (__typeof__(lval)) _res; \
2508  } while (0)
2509 
/* ppc64: one-argument call with TOC save/switch/restore as in
   CALL_FN_W_v; arg1 is loaded into r3 from the 8-byte slot at
   8(11) (= _argvec[3]). */
2510 #define CALL_FN_W_W(lval, orig, arg1) \
2511  do { \
2512  volatile OrigFn _orig = (orig); \
2513  volatile unsigned long _argvec[3+1]; \
2514  volatile unsigned long _res; \
2515  /* _argvec[0] holds current r2 across the call */ \
2516  _argvec[1] = (unsigned long)_orig.r2; \
2517  _argvec[2] = (unsigned long)_orig.nraddr; \
2518  _argvec[2+1] = (unsigned long)arg1; \
2519  __asm__ volatile( \
2520  VALGRIND_ALIGN_STACK \
2521  "mr 11,%1\n\t" \
2522  "std 2,-16(11)\n\t" /* save tocptr */ \
2523  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2524  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2525  "ld 11, 0(11)\n\t" /* target->r11 */ \
2526  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2527  "mr 11,%1\n\t" \
2528  "mr %0,3\n\t" \
2529  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2530  VALGRIND_RESTORE_STACK \
2531  : /*out*/ "=r" (_res) \
2532  : /*in*/ "r" (&_argvec[2]) \
2533  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2534  ); \
2535  lval = (__typeof__(lval)) _res; \
2536  } while (0)
2537 
/* ppc64: two-argument call; args go in r3,r4 with TOC handling as
   in CALL_FN_W_v. */
2538 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2539  do { \
2540  volatile OrigFn _orig = (orig); \
2541  volatile unsigned long _argvec[3+2]; \
2542  volatile unsigned long _res; \
2543  /* _argvec[0] holds current r2 across the call */ \
2544  _argvec[1] = (unsigned long)_orig.r2; \
2545  _argvec[2] = (unsigned long)_orig.nraddr; \
2546  _argvec[2+1] = (unsigned long)arg1; \
2547  _argvec[2+2] = (unsigned long)arg2; \
2548  __asm__ volatile( \
2549  VALGRIND_ALIGN_STACK \
2550  "mr 11,%1\n\t" \
2551  "std 2,-16(11)\n\t" /* save tocptr */ \
2552  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2553  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2554  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2555  "ld 11, 0(11)\n\t" /* target->r11 */ \
2556  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2557  "mr 11,%1\n\t" \
2558  "mr %0,3\n\t" \
2559  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2560  VALGRIND_RESTORE_STACK \
2561  : /*out*/ "=r" (_res) \
2562  : /*in*/ "r" (&_argvec[2]) \
2563  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2564  ); \
2565  lval = (__typeof__(lval)) _res; \
2566  } while (0)
2567 
/* ppc64: three-argument call; args go in r3..r5 with TOC handling
   as in CALL_FN_W_v. */
2568 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2569  do { \
2570  volatile OrigFn _orig = (orig); \
2571  volatile unsigned long _argvec[3+3]; \
2572  volatile unsigned long _res; \
2573  /* _argvec[0] holds current r2 across the call */ \
2574  _argvec[1] = (unsigned long)_orig.r2; \
2575  _argvec[2] = (unsigned long)_orig.nraddr; \
2576  _argvec[2+1] = (unsigned long)arg1; \
2577  _argvec[2+2] = (unsigned long)arg2; \
2578  _argvec[2+3] = (unsigned long)arg3; \
2579  __asm__ volatile( \
2580  VALGRIND_ALIGN_STACK \
2581  "mr 11,%1\n\t" \
2582  "std 2,-16(11)\n\t" /* save tocptr */ \
2583  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2584  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2585  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2586  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2587  "ld 11, 0(11)\n\t" /* target->r11 */ \
2588  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2589  "mr 11,%1\n\t" \
2590  "mr %0,3\n\t" \
2591  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2592  VALGRIND_RESTORE_STACK \
2593  : /*out*/ "=r" (_res) \
2594  : /*in*/ "r" (&_argvec[2]) \
2595  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2596  ); \
2597  lval = (__typeof__(lval)) _res; \
2598  } while (0)
2599 
/* ppc64: four-argument call; args go in r3..r6 with TOC handling
   as in CALL_FN_W_v. */
2600 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2601  do { \
2602  volatile OrigFn _orig = (orig); \
2603  volatile unsigned long _argvec[3+4]; \
2604  volatile unsigned long _res; \
2605  /* _argvec[0] holds current r2 across the call */ \
2606  _argvec[1] = (unsigned long)_orig.r2; \
2607  _argvec[2] = (unsigned long)_orig.nraddr; \
2608  _argvec[2+1] = (unsigned long)arg1; \
2609  _argvec[2+2] = (unsigned long)arg2; \
2610  _argvec[2+3] = (unsigned long)arg3; \
2611  _argvec[2+4] = (unsigned long)arg4; \
2612  __asm__ volatile( \
2613  VALGRIND_ALIGN_STACK \
2614  "mr 11,%1\n\t" \
2615  "std 2,-16(11)\n\t" /* save tocptr */ \
2616  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2617  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2618  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2619  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2620  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2621  "ld 11, 0(11)\n\t" /* target->r11 */ \
2622  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2623  "mr 11,%1\n\t" \
2624  "mr %0,3\n\t" \
2625  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2626  VALGRIND_RESTORE_STACK \
2627  : /*out*/ "=r" (_res) \
2628  : /*in*/ "r" (&_argvec[2]) \
2629  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2630  ); \
2631  lval = (__typeof__(lval)) _res; \
2632  } while (0)
2633 
/* ppc64: five-argument call; args go in r3..r7 with TOC handling
   as in CALL_FN_W_v. */
2634 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2635  do { \
2636  volatile OrigFn _orig = (orig); \
2637  volatile unsigned long _argvec[3+5]; \
2638  volatile unsigned long _res; \
2639  /* _argvec[0] holds current r2 across the call */ \
2640  _argvec[1] = (unsigned long)_orig.r2; \
2641  _argvec[2] = (unsigned long)_orig.nraddr; \
2642  _argvec[2+1] = (unsigned long)arg1; \
2643  _argvec[2+2] = (unsigned long)arg2; \
2644  _argvec[2+3] = (unsigned long)arg3; \
2645  _argvec[2+4] = (unsigned long)arg4; \
2646  _argvec[2+5] = (unsigned long)arg5; \
2647  __asm__ volatile( \
2648  VALGRIND_ALIGN_STACK \
2649  "mr 11,%1\n\t" \
2650  "std 2,-16(11)\n\t" /* save tocptr */ \
2651  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2652  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2653  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2654  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2655  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2656  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2657  "ld 11, 0(11)\n\t" /* target->r11 */ \
2658  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2659  "mr 11,%1\n\t" \
2660  "mr %0,3\n\t" \
2661  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2662  VALGRIND_RESTORE_STACK \
2663  : /*out*/ "=r" (_res) \
2664  : /*in*/ "r" (&_argvec[2]) \
2665  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2666  ); \
2667  lval = (__typeof__(lval)) _res; \
2668  } while (0)
2669 
/* ppc64: six-argument call; args go in r3..r8 with TOC handling
   as in CALL_FN_W_v. */
2670 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2671  do { \
2672  volatile OrigFn _orig = (orig); \
2673  volatile unsigned long _argvec[3+6]; \
2674  volatile unsigned long _res; \
2675  /* _argvec[0] holds current r2 across the call */ \
2676  _argvec[1] = (unsigned long)_orig.r2; \
2677  _argvec[2] = (unsigned long)_orig.nraddr; \
2678  _argvec[2+1] = (unsigned long)arg1; \
2679  _argvec[2+2] = (unsigned long)arg2; \
2680  _argvec[2+3] = (unsigned long)arg3; \
2681  _argvec[2+4] = (unsigned long)arg4; \
2682  _argvec[2+5] = (unsigned long)arg5; \
2683  _argvec[2+6] = (unsigned long)arg6; \
2684  __asm__ volatile( \
2685  VALGRIND_ALIGN_STACK \
2686  "mr 11,%1\n\t" \
2687  "std 2,-16(11)\n\t" /* save tocptr */ \
2688  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2689  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2690  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2691  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2692  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2693  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2694  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2695  "ld 11, 0(11)\n\t" /* target->r11 */ \
2696  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2697  "mr 11,%1\n\t" \
2698  "mr %0,3\n\t" \
2699  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2700  VALGRIND_RESTORE_STACK \
2701  : /*out*/ "=r" (_res) \
2702  : /*in*/ "r" (&_argvec[2]) \
2703  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2704  ); \
2705  lval = (__typeof__(lval)) _res; \
2706  } while (0)
2707 
/* ppc64-linux: 7 register args (r3-r9); still entirely in registers,
   same TOC save/restore protocol as the smaller variants. */
2708 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2709  arg7) \
2710  do { \
2711  volatile OrigFn _orig = (orig); \
2712  volatile unsigned long _argvec[3+7]; \
2713  volatile unsigned long _res; \
2714  /* _argvec[0] holds current r2 across the call */ \
2715  _argvec[1] = (unsigned long)_orig.r2; \
2716  _argvec[2] = (unsigned long)_orig.nraddr; \
2717  _argvec[2+1] = (unsigned long)arg1; \
2718  _argvec[2+2] = (unsigned long)arg2; \
2719  _argvec[2+3] = (unsigned long)arg3; \
2720  _argvec[2+4] = (unsigned long)arg4; \
2721  _argvec[2+5] = (unsigned long)arg5; \
2722  _argvec[2+6] = (unsigned long)arg6; \
2723  _argvec[2+7] = (unsigned long)arg7; \
2724  __asm__ volatile( \
2725  VALGRIND_ALIGN_STACK \
2726  "mr 11,%1\n\t" \
2727  "std 2,-16(11)\n\t" /* save tocptr */ \
2728  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2729  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2730  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2731  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2732  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2733  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2734  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2735  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2736  "ld 11, 0(11)\n\t" /* target->r11 */ \
2737  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2738  "mr 11,%1\n\t" \
2739  "mr %0,3\n\t" \
2740  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2741  VALGRIND_RESTORE_STACK \
2742  : /*out*/ "=r" (_res) \
2743  : /*in*/ "r" (&_argvec[2]) \
2744  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2745  ); \
2746  lval = (__typeof__(lval)) _res; \
2747  } while (0)
2748 
/* ppc64-linux: 8 register args (r3-r10) — the maximum that fits in
   GPRs; variants beyond this one must also spill to the stack. */
2749 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2750  arg7,arg8) \
2751  do { \
2752  volatile OrigFn _orig = (orig); \
2753  volatile unsigned long _argvec[3+8]; \
2754  volatile unsigned long _res; \
2755  /* _argvec[0] holds current r2 across the call */ \
2756  _argvec[1] = (unsigned long)_orig.r2; \
2757  _argvec[2] = (unsigned long)_orig.nraddr; \
2758  _argvec[2+1] = (unsigned long)arg1; \
2759  _argvec[2+2] = (unsigned long)arg2; \
2760  _argvec[2+3] = (unsigned long)arg3; \
2761  _argvec[2+4] = (unsigned long)arg4; \
2762  _argvec[2+5] = (unsigned long)arg5; \
2763  _argvec[2+6] = (unsigned long)arg6; \
2764  _argvec[2+7] = (unsigned long)arg7; \
2765  _argvec[2+8] = (unsigned long)arg8; \
2766  __asm__ volatile( \
2767  VALGRIND_ALIGN_STACK \
2768  "mr 11,%1\n\t" \
2769  "std 2,-16(11)\n\t" /* save tocptr */ \
2770  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2771  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2772  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2773  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2774  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2775  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2776  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2777  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2778  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2779  "ld 11, 0(11)\n\t" /* target->r11 */ \
2780  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2781  "mr 11,%1\n\t" \
2782  "mr %0,3\n\t" \
2783  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2784  VALGRIND_RESTORE_STACK \
2785  : /*out*/ "=r" (_res) \
2786  : /*in*/ "r" (&_argvec[2]) \
2787  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2788  ); \
2789  lval = (__typeof__(lval)) _res; \
2790  } while (0)
2791 
/* ppc64-linux: 9 args.  Args 1-8 go in r3-r10; arg9 no longer fits in
   registers, so the frame is grown by 128 bytes and arg9 is stored at
   112(r1) (the first stack-parameter slot in the expanded frame).
   VALGRIND_RESTORE_STACK undoes the expansion. */
2792 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2793  arg7,arg8,arg9) \
2794  do { \
2795  volatile OrigFn _orig = (orig); \
2796  volatile unsigned long _argvec[3+9]; \
2797  volatile unsigned long _res; \
2798  /* _argvec[0] holds current r2 across the call */ \
2799  _argvec[1] = (unsigned long)_orig.r2; \
2800  _argvec[2] = (unsigned long)_orig.nraddr; \
2801  _argvec[2+1] = (unsigned long)arg1; \
2802  _argvec[2+2] = (unsigned long)arg2; \
2803  _argvec[2+3] = (unsigned long)arg3; \
2804  _argvec[2+4] = (unsigned long)arg4; \
2805  _argvec[2+5] = (unsigned long)arg5; \
2806  _argvec[2+6] = (unsigned long)arg6; \
2807  _argvec[2+7] = (unsigned long)arg7; \
2808  _argvec[2+8] = (unsigned long)arg8; \
2809  _argvec[2+9] = (unsigned long)arg9; \
2810  __asm__ volatile( \
2811  VALGRIND_ALIGN_STACK \
2812  "mr 11,%1\n\t" \
2813  "std 2,-16(11)\n\t" /* save tocptr */ \
2814  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2815  "addi 1,1,-128\n\t" /* expand stack frame */ \
2816  /* arg9 */ \
2817  "ld 3,72(11)\n\t" \
2818  "std 3,112(1)\n\t" \
2819  /* args1-8 */ \
2820  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2821  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2822  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2823  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2824  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2825  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2826  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2827  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2828  "ld 11, 0(11)\n\t" /* target->r11 */ \
2829  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2830  "mr 11,%1\n\t" \
2831  "mr %0,3\n\t" \
2832  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2833  VALGRIND_RESTORE_STACK \
2834  : /*out*/ "=r" (_res) \
2835  : /*in*/ "r" (&_argvec[2]) \
2836  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2837  ); \
2838  lval = (__typeof__(lval)) _res; \
2839  } while (0)
2840 
/* ppc64-linux: 10 args.  Args 1-8 in r3-r10; arg9 and arg10 are
   written into the 128-byte expanded frame at 112(r1) and 120(r1).
   Stack args are stored highest-first so r3 can be reused as a
   scratch register before the real arg1 load. */
2841 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2842  arg7,arg8,arg9,arg10) \
2843  do { \
2844  volatile OrigFn _orig = (orig); \
2845  volatile unsigned long _argvec[3+10]; \
2846  volatile unsigned long _res; \
2847  /* _argvec[0] holds current r2 across the call */ \
2848  _argvec[1] = (unsigned long)_orig.r2; \
2849  _argvec[2] = (unsigned long)_orig.nraddr; \
2850  _argvec[2+1] = (unsigned long)arg1; \
2851  _argvec[2+2] = (unsigned long)arg2; \
2852  _argvec[2+3] = (unsigned long)arg3; \
2853  _argvec[2+4] = (unsigned long)arg4; \
2854  _argvec[2+5] = (unsigned long)arg5; \
2855  _argvec[2+6] = (unsigned long)arg6; \
2856  _argvec[2+7] = (unsigned long)arg7; \
2857  _argvec[2+8] = (unsigned long)arg8; \
2858  _argvec[2+9] = (unsigned long)arg9; \
2859  _argvec[2+10] = (unsigned long)arg10; \
2860  __asm__ volatile( \
2861  VALGRIND_ALIGN_STACK \
2862  "mr 11,%1\n\t" \
2863  "std 2,-16(11)\n\t" /* save tocptr */ \
2864  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2865  "addi 1,1,-128\n\t" /* expand stack frame */ \
2866  /* arg10 */ \
2867  "ld 3,80(11)\n\t" \
2868  "std 3,120(1)\n\t" \
2869  /* arg9 */ \
2870  "ld 3,72(11)\n\t" \
2871  "std 3,112(1)\n\t" \
2872  /* args1-8 */ \
2873  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2874  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2875  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2876  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2877  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2878  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2879  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2880  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2881  "ld 11, 0(11)\n\t" /* target->r11 */ \
2882  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2883  "mr 11,%1\n\t" \
2884  "mr %0,3\n\t" \
2885  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2886  VALGRIND_RESTORE_STACK \
2887  : /*out*/ "=r" (_res) \
2888  : /*in*/ "r" (&_argvec[2]) \
2889  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2890  ); \
2891  lval = (__typeof__(lval)) _res; \
2892  } while (0)
2893 
/* ppc64-linux: 11 args.  Three stack args (arg9-arg11) now need a
   larger expansion: 144 bytes, with arg9..arg11 stored at
   112(r1)/120(r1)/128(r1).  Otherwise identical to CALL_FN_W_10W. */
2894 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2895  arg7,arg8,arg9,arg10,arg11) \
2896  do { \
2897  volatile OrigFn _orig = (orig); \
2898  volatile unsigned long _argvec[3+11]; \
2899  volatile unsigned long _res; \
2900  /* _argvec[0] holds current r2 across the call */ \
2901  _argvec[1] = (unsigned long)_orig.r2; \
2902  _argvec[2] = (unsigned long)_orig.nraddr; \
2903  _argvec[2+1] = (unsigned long)arg1; \
2904  _argvec[2+2] = (unsigned long)arg2; \
2905  _argvec[2+3] = (unsigned long)arg3; \
2906  _argvec[2+4] = (unsigned long)arg4; \
2907  _argvec[2+5] = (unsigned long)arg5; \
2908  _argvec[2+6] = (unsigned long)arg6; \
2909  _argvec[2+7] = (unsigned long)arg7; \
2910  _argvec[2+8] = (unsigned long)arg8; \
2911  _argvec[2+9] = (unsigned long)arg9; \
2912  _argvec[2+10] = (unsigned long)arg10; \
2913  _argvec[2+11] = (unsigned long)arg11; \
2914  __asm__ volatile( \
2915  VALGRIND_ALIGN_STACK \
2916  "mr 11,%1\n\t" \
2917  "std 2,-16(11)\n\t" /* save tocptr */ \
2918  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2919  "addi 1,1,-144\n\t" /* expand stack frame */ \
2920  /* arg11 */ \
2921  "ld 3,88(11)\n\t" \
2922  "std 3,128(1)\n\t" \
2923  /* arg10 */ \
2924  "ld 3,80(11)\n\t" \
2925  "std 3,120(1)\n\t" \
2926  /* arg9 */ \
2927  "ld 3,72(11)\n\t" \
2928  "std 3,112(1)\n\t" \
2929  /* args1-8 */ \
2930  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2931  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2932  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2933  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2934  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2935  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2936  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2937  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2938  "ld 11, 0(11)\n\t" /* target->r11 */ \
2939  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2940  "mr 11,%1\n\t" \
2941  "mr %0,3\n\t" \
2942  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2943  VALGRIND_RESTORE_STACK \
2944  : /*out*/ "=r" (_res) \
2945  : /*in*/ "r" (&_argvec[2]) \
2946  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2947  ); \
2948  lval = (__typeof__(lval)) _res; \
2949  } while (0)
2950 
/* ppc64-linux: 12 args — largest variant.  144-byte frame expansion;
   arg9..arg12 stored at 112..136(r1), args 1-8 in r3-r10.  Same TOC
   save/restore protocol as all the other ppc64 CALL_FN_ macros. */
2951 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2952  arg7,arg8,arg9,arg10,arg11,arg12) \
2953  do { \
2954  volatile OrigFn _orig = (orig); \
2955  volatile unsigned long _argvec[3+12]; \
2956  volatile unsigned long _res; \
2957  /* _argvec[0] holds current r2 across the call */ \
2958  _argvec[1] = (unsigned long)_orig.r2; \
2959  _argvec[2] = (unsigned long)_orig.nraddr; \
2960  _argvec[2+1] = (unsigned long)arg1; \
2961  _argvec[2+2] = (unsigned long)arg2; \
2962  _argvec[2+3] = (unsigned long)arg3; \
2963  _argvec[2+4] = (unsigned long)arg4; \
2964  _argvec[2+5] = (unsigned long)arg5; \
2965  _argvec[2+6] = (unsigned long)arg6; \
2966  _argvec[2+7] = (unsigned long)arg7; \
2967  _argvec[2+8] = (unsigned long)arg8; \
2968  _argvec[2+9] = (unsigned long)arg9; \
2969  _argvec[2+10] = (unsigned long)arg10; \
2970  _argvec[2+11] = (unsigned long)arg11; \
2971  _argvec[2+12] = (unsigned long)arg12; \
2972  __asm__ volatile( \
2973  VALGRIND_ALIGN_STACK \
2974  "mr 11,%1\n\t" \
2975  "std 2,-16(11)\n\t" /* save tocptr */ \
2976  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2977  "addi 1,1,-144\n\t" /* expand stack frame */ \
2978  /* arg12 */ \
2979  "ld 3,96(11)\n\t" \
2980  "std 3,136(1)\n\t" \
2981  /* arg11 */ \
2982  "ld 3,88(11)\n\t" \
2983  "std 3,128(1)\n\t" \
2984  /* arg10 */ \
2985  "ld 3,80(11)\n\t" \
2986  "std 3,120(1)\n\t" \
2987  /* arg9 */ \
2988  "ld 3,72(11)\n\t" \
2989  "std 3,112(1)\n\t" \
2990  /* args1-8 */ \
2991  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2992  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2993  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2994  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2995  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2996  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2997  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2998  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2999  "ld 11, 0(11)\n\t" /* target->r11 */ \
3000  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3001  "mr 11,%1\n\t" \
3002  "mr %0,3\n\t" \
3003  "ld 2,-16(11)\n\t" /* restore tocptr */ \
3004  VALGRIND_RESTORE_STACK \
3005  : /*out*/ "=r" (_res) \
3006  : /*in*/ "r" (&_argvec[2]) \
3007  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3008  ); \
3009  lval = (__typeof__(lval)) _res; \
3010  } while (0)
3011 
3012 #endif /* PLAT_ppc64_linux */
3013 
3014 /* ------------------------- arm-linux ------------------------- */
3015 
3016 #if defined(PLAT_arm_linux)
3017 
3018 /* These regs are trashed by the hidden call. */
/* r0-r3 are the AAPCS argument/scratch registers; r14 is lr, which
   the branch-and-link overwrites.  r4 (used as the target register)
   is also listed here. */
3019 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14"
3020 
3021 /* Macros to save and align the stack before making a function
3022  call and restore it afterwards as gcc may not keep the stack
3023  pointer aligned if it doesn't realise calls are being made
3024  to other functions. */
3025 
3026 /* This is a bit tricky. We store the original stack pointer in r10
3027  as it is callee-saves. gcc doesn't allow the use of r11 for some
3028  reason. Also, we can't directly "bic" the stack pointer in thumb
3029  mode since r13 isn't an allowed register number in that context.
3030  So use r4 as a temporary, since that is about to get trashed
3031  anyway, just after each use of this macro. Side effect is we need
3032  to be very careful about any future changes, since
3033  VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* Save sp in r10 (callee-saved), then round sp down to an 8-byte
   boundary via the r4 scratch register (see the comment above for
   why sp can't be bic'd directly in Thumb mode). */
3034 #define VALGRIND_ALIGN_STACK \
3035  "mov r10, sp\n\t" \
3036  "mov r4, sp\n\t" \
3037  "bic r4, r4, #7\n\t" \
3038  "mov sp, r4\n\t"
/* Restore the pre-alignment sp saved in r10. */
3039 #define VALGRIND_RESTORE_STACK \
3040  "mov sp, r10\n\t"
3041 
3042 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3043  long) == 4. */
3044 
/* arm-linux: call the non-redirected function with no arguments.
   The target address is loaded into r4 and branched to via the
   NOREDIR sequence; the word result is taken from r0.  The "0"
   input constraint ties &_argvec[0] to the output's register. */
3045 #define CALL_FN_W_v(lval, orig) \
3046  do { \
3047  volatile OrigFn _orig = (orig); \
3048  volatile unsigned long _argvec[1]; \
3049  volatile unsigned long _res; \
3050  _argvec[0] = (unsigned long)_orig.nraddr; \
3051  __asm__ volatile( \
3052  VALGRIND_ALIGN_STACK \
3053  "ldr r4, [%1] \n\t" /* target->r4 */ \
3054  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3055  VALGRIND_RESTORE_STACK \
3056  "mov %0, r0\n" \
3057  : /*out*/ "=r" (_res) \
3058  : /*in*/ "0" (&_argvec[0]) \
3059  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3060  ); \
3061  lval = (__typeof__(lval)) _res; \
3062  } while (0)
3063 
/* arm-linux: one word argument, passed in r0 per the AAPCS.  The
   target must be loaded into r4 last, after the arg loads, since
   the arg loads index off the same base pointer %1. */
3064 #define CALL_FN_W_W(lval, orig, arg1) \
3065  do { \
3066  volatile OrigFn _orig = (orig); \
3067  volatile unsigned long _argvec[2]; \
3068  volatile unsigned long _res; \
3069  _argvec[0] = (unsigned long)_orig.nraddr; \
3070  _argvec[1] = (unsigned long)(arg1); \
3071  __asm__ volatile( \
3072  VALGRIND_ALIGN_STACK \
3073  "ldr r0, [%1, #4] \n\t" \
3074  "ldr r4, [%1] \n\t" /* target->r4 */ \
3075  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3076  VALGRIND_RESTORE_STACK \
3077  "mov %0, r0\n" \
3078  : /*out*/ "=r" (_res) \
3079  : /*in*/ "0" (&_argvec[0]) \
3080  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3081  ); \
3082  lval = (__typeof__(lval)) _res; \
3083  } while (0)
3084 
/* arm-linux: two word arguments in r0, r1. */
3085 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3086  do { \
3087  volatile OrigFn _orig = (orig); \
3088  volatile unsigned long _argvec[3]; \
3089  volatile unsigned long _res; \
3090  _argvec[0] = (unsigned long)_orig.nraddr; \
3091  _argvec[1] = (unsigned long)(arg1); \
3092  _argvec[2] = (unsigned long)(arg2); \
3093  __asm__ volatile( \
3094  VALGRIND_ALIGN_STACK \
3095  "ldr r0, [%1, #4] \n\t" \
3096  "ldr r1, [%1, #8] \n\t" \
3097  "ldr r4, [%1] \n\t" /* target->r4 */ \
3098  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3099  VALGRIND_RESTORE_STACK \
3100  "mov %0, r0\n" \
3101  : /*out*/ "=r" (_res) \
3102  : /*in*/ "0" (&_argvec[0]) \
3103  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3104  ); \
3105  lval = (__typeof__(lval)) _res; \
3106  } while (0)
3107 
/* arm-linux: three word arguments in r0-r2. */
3108 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3109  do { \
3110  volatile OrigFn _orig = (orig); \
3111  volatile unsigned long _argvec[4]; \
3112  volatile unsigned long _res; \
3113  _argvec[0] = (unsigned long)_orig.nraddr; \
3114  _argvec[1] = (unsigned long)(arg1); \
3115  _argvec[2] = (unsigned long)(arg2); \
3116  _argvec[3] = (unsigned long)(arg3); \
3117  __asm__ volatile( \
3118  VALGRIND_ALIGN_STACK \
3119  "ldr r0, [%1, #4] \n\t" \
3120  "ldr r1, [%1, #8] \n\t" \
3121  "ldr r2, [%1, #12] \n\t" \
3122  "ldr r4, [%1] \n\t" /* target->r4 */ \
3123  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3124  VALGRIND_RESTORE_STACK \
3125  "mov %0, r0\n" \
3126  : /*out*/ "=r" (_res) \
3127  : /*in*/ "0" (&_argvec[0]) \
3128  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3129  ); \
3130  lval = (__typeof__(lval)) _res; \
3131  } while (0)
3132 
/* arm-linux: four word arguments in r0-r3 — the maximum that fits in
   registers under the AAPCS; larger variants spill to the stack. */
3133 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3134  do { \
3135  volatile OrigFn _orig = (orig); \
3136  volatile unsigned long _argvec[5]; \
3137  volatile unsigned long _res; \
3138  _argvec[0] = (unsigned long)_orig.nraddr; \
3139  _argvec[1] = (unsigned long)(arg1); \
3140  _argvec[2] = (unsigned long)(arg2); \
3141  _argvec[3] = (unsigned long)(arg3); \
3142  _argvec[4] = (unsigned long)(arg4); \
3143  __asm__ volatile( \
3144  VALGRIND_ALIGN_STACK \
3145  "ldr r0, [%1, #4] \n\t" \
3146  "ldr r1, [%1, #8] \n\t" \
3147  "ldr r2, [%1, #12] \n\t" \
3148  "ldr r3, [%1, #16] \n\t" \
3149  "ldr r4, [%1] \n\t" /* target->r4 */ \
3150  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3151  VALGRIND_RESTORE_STACK \
3152  "mov %0, r0" \
3153  : /*out*/ "=r" (_res) \
3154  : /*in*/ "0" (&_argvec[0]) \
3155  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3156  ); \
3157  lval = (__typeof__(lval)) _res; \
3158  } while (0)
3159 
/* arm-linux: 5 args — args 1-4 in r0-r3, arg5 pushed on the stack.
   The extra "sub sp, sp, #4" before the single push keeps the stack
   8-byte aligned at the call, as the AAPCS requires. */
3160 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3161  do { \
3162  volatile OrigFn _orig = (orig); \
3163  volatile unsigned long _argvec[6]; \
3164  volatile unsigned long _res; \
3165  _argvec[0] = (unsigned long)_orig.nraddr; \
3166  _argvec[1] = (unsigned long)(arg1); \
3167  _argvec[2] = (unsigned long)(arg2); \
3168  _argvec[3] = (unsigned long)(arg3); \
3169  _argvec[4] = (unsigned long)(arg4); \
3170  _argvec[5] = (unsigned long)(arg5); \
3171  __asm__ volatile( \
3172  VALGRIND_ALIGN_STACK \
3173  "sub sp, sp, #4 \n\t" \
3174  "ldr r0, [%1, #20] \n\t" \
3175  "push {r0} \n\t" \
3176  "ldr r0, [%1, #4] \n\t" \
3177  "ldr r1, [%1, #8] \n\t" \
3178  "ldr r2, [%1, #12] \n\t" \
3179  "ldr r3, [%1, #16] \n\t" \
3180  "ldr r4, [%1] \n\t" /* target->r4 */ \
3181  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3182  VALGRIND_RESTORE_STACK \
3183  "mov %0, r0" \
3184  : /*out*/ "=r" (_res) \
3185  : /*in*/ "0" (&_argvec[0]) \
3186  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3187  ); \
3188  lval = (__typeof__(lval)) _res; \
3189  } while (0)
3190 
/* arm-linux: 6 args — args 5-6 pushed as a pair (8 bytes, so no
   extra alignment padding needed), args 1-4 in r0-r3. */
3191 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3192  do { \
3193  volatile OrigFn _orig = (orig); \
3194  volatile unsigned long _argvec[7]; \
3195  volatile unsigned long _res; \
3196  _argvec[0] = (unsigned long)_orig.nraddr; \
3197  _argvec[1] = (unsigned long)(arg1); \
3198  _argvec[2] = (unsigned long)(arg2); \
3199  _argvec[3] = (unsigned long)(arg3); \
3200  _argvec[4] = (unsigned long)(arg4); \
3201  _argvec[5] = (unsigned long)(arg5); \
3202  _argvec[6] = (unsigned long)(arg6); \
3203  __asm__ volatile( \
3204  VALGRIND_ALIGN_STACK \
3205  "ldr r0, [%1, #20] \n\t" \
3206  "ldr r1, [%1, #24] \n\t" \
3207  "push {r0, r1} \n\t" \
3208  "ldr r0, [%1, #4] \n\t" \
3209  "ldr r1, [%1, #8] \n\t" \
3210  "ldr r2, [%1, #12] \n\t" \
3211  "ldr r3, [%1, #16] \n\t" \
3212  "ldr r4, [%1] \n\t" /* target->r4 */ \
3213  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3214  VALGRIND_RESTORE_STACK \
3215  "mov %0, r0" \
3216  : /*out*/ "=r" (_res) \
3217  : /*in*/ "0" (&_argvec[0]) \
3218  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3219  ); \
3220  lval = (__typeof__(lval)) _res; \
3221  } while (0)
3222 
/* arm-linux: 7 args — args 5-7 pushed (3 words, so a 4-byte pad is
   subtracted first to keep 8-byte stack alignment). */
3223 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3224  arg7) \
3225  do { \
3226  volatile OrigFn _orig = (orig); \
3227  volatile unsigned long _argvec[8]; \
3228  volatile unsigned long _res; \
3229  _argvec[0] = (unsigned long)_orig.nraddr; \
3230  _argvec[1] = (unsigned long)(arg1); \
3231  _argvec[2] = (unsigned long)(arg2); \
3232  _argvec[3] = (unsigned long)(arg3); \
3233  _argvec[4] = (unsigned long)(arg4); \
3234  _argvec[5] = (unsigned long)(arg5); \
3235  _argvec[6] = (unsigned long)(arg6); \
3236  _argvec[7] = (unsigned long)(arg7); \
3237  __asm__ volatile( \
3238  VALGRIND_ALIGN_STACK \
3239  "sub sp, sp, #4 \n\t" \
3240  "ldr r0, [%1, #20] \n\t" \
3241  "ldr r1, [%1, #24] \n\t" \
3242  "ldr r2, [%1, #28] \n\t" \
3243  "push {r0, r1, r2} \n\t" \
3244  "ldr r0, [%1, #4] \n\t" \
3245  "ldr r1, [%1, #8] \n\t" \
3246  "ldr r2, [%1, #12] \n\t" \
3247  "ldr r3, [%1, #16] \n\t" \
3248  "ldr r4, [%1] \n\t" /* target->r4 */ \
3249  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3250  VALGRIND_RESTORE_STACK \
3251  "mov %0, r0" \
3252  : /*out*/ "=r" (_res) \
3253  : /*in*/ "0" (&_argvec[0]) \
3254  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3255  ); \
3256  lval = (__typeof__(lval)) _res; \
3257  } while (0)
3258 
/* arm-linux: 8 args — args 5-8 pushed as 4 words (16 bytes, already
   8-byte aligned), args 1-4 in r0-r3. */
3259 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3260  arg7,arg8) \
3261  do { \
3262  volatile OrigFn _orig = (orig); \
3263  volatile unsigned long _argvec[9]; \
3264  volatile unsigned long _res; \
3265  _argvec[0] = (unsigned long)_orig.nraddr; \
3266  _argvec[1] = (unsigned long)(arg1); \
3267  _argvec[2] = (unsigned long)(arg2); \
3268  _argvec[3] = (unsigned long)(arg3); \
3269  _argvec[4] = (unsigned long)(arg4); \
3270  _argvec[5] = (unsigned long)(arg5); \
3271  _argvec[6] = (unsigned long)(arg6); \
3272  _argvec[7] = (unsigned long)(arg7); \
3273  _argvec[8] = (unsigned long)(arg8); \
3274  __asm__ volatile( \
3275  VALGRIND_ALIGN_STACK \
3276  "ldr r0, [%1, #20] \n\t" \
3277  "ldr r1, [%1, #24] \n\t" \
3278  "ldr r2, [%1, #28] \n\t" \
3279  "ldr r3, [%1, #32] \n\t" \
3280  "push {r0, r1, r2, r3} \n\t" \
3281  "ldr r0, [%1, #4] \n\t" \
3282  "ldr r1, [%1, #8] \n\t" \
3283  "ldr r2, [%1, #12] \n\t" \
3284  "ldr r3, [%1, #16] \n\t" \
3285  "ldr r4, [%1] \n\t" /* target->r4 */ \
3286  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3287  VALGRIND_RESTORE_STACK \
3288  "mov %0, r0" \
3289  : /*out*/ "=r" (_res) \
3290  : /*in*/ "0" (&_argvec[0]) \
3291  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3292  ); \
3293  lval = (__typeof__(lval)) _res; \
3294  } while (0)
3295 
/* arm-linux: 9 args — args 5-9 pushed as 5 words plus a 4-byte
   alignment pad; r4 is reused as a scratch for arg9 before the
   final target load overwrites it. */
3296 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3297  arg7,arg8,arg9) \
3298  do { \
3299  volatile OrigFn _orig = (orig); \
3300  volatile unsigned long _argvec[10]; \
3301  volatile unsigned long _res; \
3302  _argvec[0] = (unsigned long)_orig.nraddr; \
3303  _argvec[1] = (unsigned long)(arg1); \
3304  _argvec[2] = (unsigned long)(arg2); \
3305  _argvec[3] = (unsigned long)(arg3); \
3306  _argvec[4] = (unsigned long)(arg4); \
3307  _argvec[5] = (unsigned long)(arg5); \
3308  _argvec[6] = (unsigned long)(arg6); \
3309  _argvec[7] = (unsigned long)(arg7); \
3310  _argvec[8] = (unsigned long)(arg8); \
3311  _argvec[9] = (unsigned long)(arg9); \
3312  __asm__ volatile( \
3313  VALGRIND_ALIGN_STACK \
3314  "sub sp, sp, #4 \n\t" \
3315  "ldr r0, [%1, #20] \n\t" \
3316  "ldr r1, [%1, #24] \n\t" \
3317  "ldr r2, [%1, #28] \n\t" \
3318  "ldr r3, [%1, #32] \n\t" \
3319  "ldr r4, [%1, #36] \n\t" \
3320  "push {r0, r1, r2, r3, r4} \n\t" \
3321  "ldr r0, [%1, #4] \n\t" \
3322  "ldr r1, [%1, #8] \n\t" \
3323  "ldr r2, [%1, #12] \n\t" \
3324  "ldr r3, [%1, #16] \n\t" \
3325  "ldr r4, [%1] \n\t" /* target->r4 */ \
3326  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3327  VALGRIND_RESTORE_STACK \
3328  "mov %0, r0" \
3329  : /*out*/ "=r" (_res) \
3330  : /*in*/ "0" (&_argvec[0]) \
3331  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3332  ); \
3333  lval = (__typeof__(lval)) _res; \
3334  } while (0)
3335 
/* arm-linux: 10 args — arg10 pushed first (highest on the stack),
   then args 5-9 as a block of 5; 6 words total keeps 8-byte
   alignment without a pad. */
3336 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3337  arg7,arg8,arg9,arg10) \
3338  do { \
3339  volatile OrigFn _orig = (orig); \
3340  volatile unsigned long _argvec[11]; \
3341  volatile unsigned long _res; \
3342  _argvec[0] = (unsigned long)_orig.nraddr; \
3343  _argvec[1] = (unsigned long)(arg1); \
3344  _argvec[2] = (unsigned long)(arg2); \
3345  _argvec[3] = (unsigned long)(arg3); \
3346  _argvec[4] = (unsigned long)(arg4); \
3347  _argvec[5] = (unsigned long)(arg5); \
3348  _argvec[6] = (unsigned long)(arg6); \
3349  _argvec[7] = (unsigned long)(arg7); \
3350  _argvec[8] = (unsigned long)(arg8); \
3351  _argvec[9] = (unsigned long)(arg9); \
3352  _argvec[10] = (unsigned long)(arg10); \
3353  __asm__ volatile( \
3354  VALGRIND_ALIGN_STACK \
3355  "ldr r0, [%1, #40] \n\t" \
3356  "push {r0} \n\t" \
3357  "ldr r0, [%1, #20] \n\t" \
3358  "ldr r1, [%1, #24] \n\t" \
3359  "ldr r2, [%1, #28] \n\t" \
3360  "ldr r3, [%1, #32] \n\t" \
3361  "ldr r4, [%1, #36] \n\t" \
3362  "push {r0, r1, r2, r3, r4} \n\t" \
3363  "ldr r0, [%1, #4] \n\t" \
3364  "ldr r1, [%1, #8] \n\t" \
3365  "ldr r2, [%1, #12] \n\t" \
3366  "ldr r3, [%1, #16] \n\t" \
3367  "ldr r4, [%1] \n\t" /* target->r4 */ \
3368  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3369  VALGRIND_RESTORE_STACK \
3370  "mov %0, r0" \
3371  : /*out*/ "=r" (_res) \
3372  : /*in*/ "0" (&_argvec[0]) \
3373  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3374  ); \
3375  lval = (__typeof__(lval)) _res; \
3376  } while (0)
3377 
/* arm-linux: 11 args — 7 stack words (args 5-11) plus a 4-byte pad;
   args 10-11 pushed first, then args 5-9. */
3378 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3379  arg6,arg7,arg8,arg9,arg10, \
3380  arg11) \
3381  do { \
3382  volatile OrigFn _orig = (orig); \
3383  volatile unsigned long _argvec[12]; \
3384  volatile unsigned long _res; \
3385  _argvec[0] = (unsigned long)_orig.nraddr; \
3386  _argvec[1] = (unsigned long)(arg1); \
3387  _argvec[2] = (unsigned long)(arg2); \
3388  _argvec[3] = (unsigned long)(arg3); \
3389  _argvec[4] = (unsigned long)(arg4); \
3390  _argvec[5] = (unsigned long)(arg5); \
3391  _argvec[6] = (unsigned long)(arg6); \
3392  _argvec[7] = (unsigned long)(arg7); \
3393  _argvec[8] = (unsigned long)(arg8); \
3394  _argvec[9] = (unsigned long)(arg9); \
3395  _argvec[10] = (unsigned long)(arg10); \
3396  _argvec[11] = (unsigned long)(arg11); \
3397  __asm__ volatile( \
3398  VALGRIND_ALIGN_STACK \
3399  "sub sp, sp, #4 \n\t" \
3400  "ldr r0, [%1, #40] \n\t" \
3401  "ldr r1, [%1, #44] \n\t" \
3402  "push {r0, r1} \n\t" \
3403  "ldr r0, [%1, #20] \n\t" \
3404  "ldr r1, [%1, #24] \n\t" \
3405  "ldr r2, [%1, #28] \n\t" \
3406  "ldr r3, [%1, #32] \n\t" \
3407  "ldr r4, [%1, #36] \n\t" \
3408  "push {r0, r1, r2, r3, r4} \n\t" \
3409  "ldr r0, [%1, #4] \n\t" \
3410  "ldr r1, [%1, #8] \n\t" \
3411  "ldr r2, [%1, #12] \n\t" \
3412  "ldr r3, [%1, #16] \n\t" \
3413  "ldr r4, [%1] \n\t" /* target->r4 */ \
3414  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3415  VALGRIND_RESTORE_STACK \
3416  "mov %0, r0" \
3417  : /*out*/ "=r" (_res) \
3418  : /*in*/ "0" (&_argvec[0]) \
3419  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3420  ); \
3421  lval = (__typeof__(lval)) _res; \
3422  } while (0)
3423 
/* arm-linux: 12 args — largest variant; 8 stack words (args 5-12,
   already 8-byte aligned), pushed highest-first in two groups. */
3424 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3425  arg6,arg7,arg8,arg9,arg10, \
3426  arg11,arg12) \
3427  do { \
3428  volatile OrigFn _orig = (orig); \
3429  volatile unsigned long _argvec[13]; \
3430  volatile unsigned long _res; \
3431  _argvec[0] = (unsigned long)_orig.nraddr; \
3432  _argvec[1] = (unsigned long)(arg1); \
3433  _argvec[2] = (unsigned long)(arg2); \
3434  _argvec[3] = (unsigned long)(arg3); \
3435  _argvec[4] = (unsigned long)(arg4); \
3436  _argvec[5] = (unsigned long)(arg5); \
3437  _argvec[6] = (unsigned long)(arg6); \
3438  _argvec[7] = (unsigned long)(arg7); \
3439  _argvec[8] = (unsigned long)(arg8); \
3440  _argvec[9] = (unsigned long)(arg9); \
3441  _argvec[10] = (unsigned long)(arg10); \
3442  _argvec[11] = (unsigned long)(arg11); \
3443  _argvec[12] = (unsigned long)(arg12); \
3444  __asm__ volatile( \
3445  VALGRIND_ALIGN_STACK \
3446  "ldr r0, [%1, #40] \n\t" \
3447  "ldr r1, [%1, #44] \n\t" \
3448  "ldr r2, [%1, #48] \n\t" \
3449  "push {r0, r1, r2} \n\t" \
3450  "ldr r0, [%1, #20] \n\t" \
3451  "ldr r1, [%1, #24] \n\t" \
3452  "ldr r2, [%1, #28] \n\t" \
3453  "ldr r3, [%1, #32] \n\t" \
3454  "ldr r4, [%1, #36] \n\t" \
3455  "push {r0, r1, r2, r3, r4} \n\t" \
3456  "ldr r0, [%1, #4] \n\t" \
3457  "ldr r1, [%1, #8] \n\t" \
3458  "ldr r2, [%1, #12] \n\t" \
3459  "ldr r3, [%1, #16] \n\t" \
3460  "ldr r4, [%1] \n\t" /* target->r4 */ \
3461  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3462  VALGRIND_RESTORE_STACK \
3463  "mov %0, r0" \
3464  : /*out*/ "=r" (_res) \
3465  : /*in*/ "0" (&_argvec[0]) \
3466  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3467  ); \
3468  lval = (__typeof__(lval)) _res; \
3469  } while (0)
3470 
3471 #endif /* PLAT_arm_linux */
3472 
3473 /* ------------------------- s390x-linux ------------------------- */
3474 
3475 #if defined(PLAT_s390x_linux)
3476 
3477 /* Similar workaround as amd64 (see above), but we use r11 as frame
3478  pointer and save the old r11 in r7. r11 might be used for
3479  argvec, therefore we copy argvec in r1 since r1 is clobbered
3480  after the call anyway. */
/* When the assembler supports CFI directives, emit unwind info that
   temporarily redefines the CFA to r11 (set from the caller's CFA via
   __builtin_dwarf_cfa()), with the old r11 parked in r7.  Without CFI
   support, only the argvec copy into r1 is emitted. */
3481 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
3482 # define __FRAME_POINTER \
3483  ,"d"(__builtin_dwarf_cfa())
3484 # define VALGRIND_CFI_PROLOGUE \
3485  ".cfi_remember_state\n\t" \
3486  "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
3487  "lgr 7,11\n\t" \
3488  "lgr 11,%2\n\t" \
3489  ".cfi_def_cfa r11, 0\n\t"
3490 # define VALGRIND_CFI_EPILOGUE \
3491  "lgr 11, 7\n\t" \
3492  ".cfi_restore_state\n\t"
3493 #else
3494 # define __FRAME_POINTER
3495 # define VALGRIND_CFI_PROLOGUE \
3496  "lgr 1,%1\n\t"
3497 # define VALGRIND_CFI_EPILOGUE
3498 #endif
3499 
3500 /* Nb: On s390 the stack pointer is properly aligned *at all times*
3501  according to the s390 GCC maintainer. (The ABI specification is not
3502  precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
3503  VALGRIND_RESTORE_STACK are not defined here. */
3504 
3505 /* These regs are trashed by the hidden call. Note that we overwrite
3506  r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
3507  function a proper return address. All others are ABI defined call
3508  clobbers. */
/* GPRs r0-r5 plus r14 (overwritten for the return address — see the
   comment above) and FPRs f0-f7. */
3509 #define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
3510  "f0","f1","f2","f3","f4","f5","f6","f7"
3511 
3512 /* Nb: Although r11 is modified in the asm snippets below (inside
3513  VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
3514  two reasons:
3515  (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
3516  modified
3517  (2) GCC will complain that r11 cannot appear inside a clobber section,
3518  when compiled with -O -fno-omit-frame-pointer
3519  */
3520 
/* s390x-linux: zero-argument call.  160 bytes of stack are reserved
   below r15 for the callee (standard s390x frame — TODO confirm
   against the ABI doc); the target is loaded into r1 and called via
   VALGRIND_CALL_NOREDIR_R1; the result comes back in r2.  r7 is
   clobbered because the CFI prologue parks r11 there. */
3521 #define CALL_FN_W_v(lval, orig) \
3522  do { \
3523  volatile OrigFn _orig = (orig); \
3524  volatile unsigned long _argvec[1]; \
3525  volatile unsigned long _res; \
3526  _argvec[0] = (unsigned long)_orig.nraddr; \
3527  __asm__ volatile( \
3528  VALGRIND_CFI_PROLOGUE \
3529  "aghi 15,-160\n\t" \
3530  "lg 1, 0(1)\n\t" /* target->r1 */ \
3531  VALGRIND_CALL_NOREDIR_R1 \
3532  "lgr %0, 2\n\t" \
3533  "aghi 15,160\n\t" \
3534  VALGRIND_CFI_EPILOGUE \
3535  : /*out*/ "=d" (_res) \
3536  : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
3537  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3538  ); \
3539  lval = (__typeof__(lval)) _res; \
3540  } while (0)
3541 
3542 /* The call abi has the arguments in r2-r6 and stack */
/* s390x-linux: one argument, loaded into r2 from the argvec (whose
   address was copied to r1 by VALGRIND_CFI_PROLOGUE); the target is
   loaded into r1 last, overwriting the argvec pointer. */
3543 #define CALL_FN_W_W(lval, orig, arg1) \
3544  do { \
3545  volatile OrigFn _orig = (orig); \
3546  volatile unsigned long _argvec[2]; \
3547  volatile unsigned long _res; \
3548  _argvec[0] = (unsigned long)_orig.nraddr; \
3549  _argvec[1] = (unsigned long)arg1; \
3550  __asm__ volatile( \
3551  VALGRIND_CFI_PROLOGUE \
3552  "aghi 15,-160\n\t" \
3553  "lg 2, 8(1)\n\t" \
3554  "lg 1, 0(1)\n\t" \
3555  VALGRIND_CALL_NOREDIR_R1 \
3556  "lgr %0, 2\n\t" \
3557  "aghi 15,160\n\t" \
3558  VALGRIND_CFI_EPILOGUE \
3559  : /*out*/ "=d" (_res) \
3560  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3561  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3562  ); \
3563  lval = (__typeof__(lval)) _res; \
3564  } while (0)
3565 
3566 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
3567  do { \
3568  volatile OrigFn _orig = (orig); \
3569  volatile unsigned long _argvec[3]; \
3570  volatile unsigned long _res; \
3571  _argvec[0] = (unsigned long)_orig.nraddr; \
3572  _argvec[1] = (unsigned long)arg1; \
3573  _argvec[2] = (unsigned long)arg2; \
3574  __asm__ volatile( \
3575  VALGRIND_CFI_PROLOGUE \
3576  "aghi 15,-160\n\t" \
3577  "lg 2, 8(1)\n\t" \
3578  "lg 3,16(1)\n\t" \
3579  "lg 1, 0(1)\n\t" \
3580  VALGRIND_CALL_NOREDIR_R1 \
3581  "lgr %0, 2\n\t" \
3582  "aghi 15,160\n\t" \
3583  VALGRIND_CFI_EPILOGUE \
3584  : /*out*/ "=d" (_res) \
3585  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3586  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3587  ); \
3588  lval = (__typeof__(lval)) _res; \
3589  } while (0)
3590 
3591 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
3592  do { \
3593  volatile OrigFn _orig = (orig); \
3594  volatile unsigned long _argvec[4]; \
3595  volatile unsigned long _res; \
3596  _argvec[0] = (unsigned long)_orig.nraddr; \
3597  _argvec[1] = (unsigned long)arg1; \
3598  _argvec[2] = (unsigned long)arg2; \
3599  _argvec[3] = (unsigned long)arg3; \
3600  __asm__ volatile( \
3601  VALGRIND_CFI_PROLOGUE \
3602  "aghi 15,-160\n\t" \
3603  "lg 2, 8(1)\n\t" \
3604  "lg 3,16(1)\n\t" \
3605  "lg 4,24(1)\n\t" \
3606  "lg 1, 0(1)\n\t" \
3607  VALGRIND_CALL_NOREDIR_R1 \
3608  "lgr %0, 2\n\t" \
3609  "aghi 15,160\n\t" \
3610  VALGRIND_CFI_EPILOGUE \
3611  : /*out*/ "=d" (_res) \
3612  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3613  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3614  ); \
3615  lval = (__typeof__(lval)) _res; \
3616  } while (0)
3617 
/* Four arguments: r2-r5. */
3618 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
3619  do { \
3620  volatile OrigFn _orig = (orig); \
3621  volatile unsigned long _argvec[5]; \
3622  volatile unsigned long _res; \
3623  _argvec[0] = (unsigned long)_orig.nraddr; \
3624  _argvec[1] = (unsigned long)arg1; \
3625  _argvec[2] = (unsigned long)arg2; \
3626  _argvec[3] = (unsigned long)arg3; \
3627  _argvec[4] = (unsigned long)arg4; \
3628  __asm__ volatile( \
3629  VALGRIND_CFI_PROLOGUE \
3630  "aghi 15,-160\n\t" \
3631  "lg 2, 8(1)\n\t" \
3632  "lg 3,16(1)\n\t" \
3633  "lg 4,24(1)\n\t" \
3634  "lg 5,32(1)\n\t" \
3635  "lg 1, 0(1)\n\t" \
3636  VALGRIND_CALL_NOREDIR_R1 \
3637  "lgr %0, 2\n\t" \
3638  "aghi 15,160\n\t" \
3639  VALGRIND_CFI_EPILOGUE \
3640  : /*out*/ "=d" (_res) \
3641  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3642  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3643  ); \
3644  lval = (__typeof__(lval)) _res; \
3645  } while (0)
3646 
/* Five arguments: r2-r6.  r6 is additionally clobbered ("6") from
   here on, since it now carries an argument. */
3647 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
3648  do { \
3649  volatile OrigFn _orig = (orig); \
3650  volatile unsigned long _argvec[6]; \
3651  volatile unsigned long _res; \
3652  _argvec[0] = (unsigned long)_orig.nraddr; \
3653  _argvec[1] = (unsigned long)arg1; \
3654  _argvec[2] = (unsigned long)arg2; \
3655  _argvec[3] = (unsigned long)arg3; \
3656  _argvec[4] = (unsigned long)arg4; \
3657  _argvec[5] = (unsigned long)arg5; \
3658  __asm__ volatile( \
3659  VALGRIND_CFI_PROLOGUE \
3660  "aghi 15,-160\n\t" \
3661  "lg 2, 8(1)\n\t" \
3662  "lg 3,16(1)\n\t" \
3663  "lg 4,24(1)\n\t" \
3664  "lg 5,32(1)\n\t" \
3665  "lg 6,40(1)\n\t" \
3666  "lg 1, 0(1)\n\t" \
3667  VALGRIND_CALL_NOREDIR_R1 \
3668  "lgr %0, 2\n\t" \
3669  "aghi 15,160\n\t" \
3670  VALGRIND_CFI_EPILOGUE \
3671  : /*out*/ "=d" (_res) \
3672  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3673  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3674  ); \
3675  lval = (__typeof__(lval)) _res; \
3676  } while (0)
3677 
/* Six arguments: first five in r2-r6, arg6 copied to the stack
   parameter slot at 160(r15) with mvc; frame grows to 160+8 bytes. */
3678 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3679  arg6) \
3680  do { \
3681  volatile OrigFn _orig = (orig); \
3682  volatile unsigned long _argvec[7]; \
3683  volatile unsigned long _res; \
3684  _argvec[0] = (unsigned long)_orig.nraddr; \
3685  _argvec[1] = (unsigned long)arg1; \
3686  _argvec[2] = (unsigned long)arg2; \
3687  _argvec[3] = (unsigned long)arg3; \
3688  _argvec[4] = (unsigned long)arg4; \
3689  _argvec[5] = (unsigned long)arg5; \
3690  _argvec[6] = (unsigned long)arg6; \
3691  __asm__ volatile( \
3692  VALGRIND_CFI_PROLOGUE \
3693  "aghi 15,-168\n\t" \
3694  "lg 2, 8(1)\n\t" \
3695  "lg 3,16(1)\n\t" \
3696  "lg 4,24(1)\n\t" \
3697  "lg 5,32(1)\n\t" \
3698  "lg 6,40(1)\n\t" \
3699  "mvc 160(8,15), 48(1)\n\t" \
3700  "lg 1, 0(1)\n\t" \
3701  VALGRIND_CALL_NOREDIR_R1 \
3702  "lgr %0, 2\n\t" \
3703  "aghi 15,168\n\t" \
3704  VALGRIND_CFI_EPILOGUE \
3705  : /*out*/ "=d" (_res) \
3706  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3707  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3708  ); \
3709  lval = (__typeof__(lval)) _res; \
3710  } while (0)
3711 
/* Seven arguments: args 6-7 on the stack at 160/168(r15); frame
   160+16 bytes. */
3712 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3713  arg6, arg7) \
3714  do { \
3715  volatile OrigFn _orig = (orig); \
3716  volatile unsigned long _argvec[8]; \
3717  volatile unsigned long _res; \
3718  _argvec[0] = (unsigned long)_orig.nraddr; \
3719  _argvec[1] = (unsigned long)arg1; \
3720  _argvec[2] = (unsigned long)arg2; \
3721  _argvec[3] = (unsigned long)arg3; \
3722  _argvec[4] = (unsigned long)arg4; \
3723  _argvec[5] = (unsigned long)arg5; \
3724  _argvec[6] = (unsigned long)arg6; \
3725  _argvec[7] = (unsigned long)arg7; \
3726  __asm__ volatile( \
3727  VALGRIND_CFI_PROLOGUE \
3728  "aghi 15,-176\n\t" \
3729  "lg 2, 8(1)\n\t" \
3730  "lg 3,16(1)\n\t" \
3731  "lg 4,24(1)\n\t" \
3732  "lg 5,32(1)\n\t" \
3733  "lg 6,40(1)\n\t" \
3734  "mvc 160(8,15), 48(1)\n\t" \
3735  "mvc 168(8,15), 56(1)\n\t" \
3736  "lg 1, 0(1)\n\t" \
3737  VALGRIND_CALL_NOREDIR_R1 \
3738  "lgr %0, 2\n\t" \
3739  "aghi 15,176\n\t" \
3740  VALGRIND_CFI_EPILOGUE \
3741  : /*out*/ "=d" (_res) \
3742  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3743  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3744  ); \
3745  lval = (__typeof__(lval)) _res; \
3746  } while (0)
3747 
/* Eight arguments: args 6-8 on the stack; frame 160+24 bytes. */
3748 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3749  arg6, arg7 ,arg8) \
3750  do { \
3751  volatile OrigFn _orig = (orig); \
3752  volatile unsigned long _argvec[9]; \
3753  volatile unsigned long _res; \
3754  _argvec[0] = (unsigned long)_orig.nraddr; \
3755  _argvec[1] = (unsigned long)arg1; \
3756  _argvec[2] = (unsigned long)arg2; \
3757  _argvec[3] = (unsigned long)arg3; \
3758  _argvec[4] = (unsigned long)arg4; \
3759  _argvec[5] = (unsigned long)arg5; \
3760  _argvec[6] = (unsigned long)arg6; \
3761  _argvec[7] = (unsigned long)arg7; \
3762  _argvec[8] = (unsigned long)arg8; \
3763  __asm__ volatile( \
3764  VALGRIND_CFI_PROLOGUE \
3765  "aghi 15,-184\n\t" \
3766  "lg 2, 8(1)\n\t" \
3767  "lg 3,16(1)\n\t" \
3768  "lg 4,24(1)\n\t" \
3769  "lg 5,32(1)\n\t" \
3770  "lg 6,40(1)\n\t" \
3771  "mvc 160(8,15), 48(1)\n\t" \
3772  "mvc 168(8,15), 56(1)\n\t" \
3773  "mvc 176(8,15), 64(1)\n\t" \
3774  "lg 1, 0(1)\n\t" \
3775  VALGRIND_CALL_NOREDIR_R1 \
3776  "lgr %0, 2\n\t" \
3777  "aghi 15,184\n\t" \
3778  VALGRIND_CFI_EPILOGUE \
3779  : /*out*/ "=d" (_res) \
3780  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3781  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3782  ); \
3783  lval = (__typeof__(lval)) _res; \
3784  } while (0)
3785 
/* Nine arguments: args 6-9 on the stack; frame 160+32 bytes. */
3786 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3787  arg6, arg7 ,arg8, arg9) \
3788  do { \
3789  volatile OrigFn _orig = (orig); \
3790  volatile unsigned long _argvec[10]; \
3791  volatile unsigned long _res; \
3792  _argvec[0] = (unsigned long)_orig.nraddr; \
3793  _argvec[1] = (unsigned long)arg1; \
3794  _argvec[2] = (unsigned long)arg2; \
3795  _argvec[3] = (unsigned long)arg3; \
3796  _argvec[4] = (unsigned long)arg4; \
3797  _argvec[5] = (unsigned long)arg5; \
3798  _argvec[6] = (unsigned long)arg6; \
3799  _argvec[7] = (unsigned long)arg7; \
3800  _argvec[8] = (unsigned long)arg8; \
3801  _argvec[9] = (unsigned long)arg9; \
3802  __asm__ volatile( \
3803  VALGRIND_CFI_PROLOGUE \
3804  "aghi 15,-192\n\t" \
3805  "lg 2, 8(1)\n\t" \
3806  "lg 3,16(1)\n\t" \
3807  "lg 4,24(1)\n\t" \
3808  "lg 5,32(1)\n\t" \
3809  "lg 6,40(1)\n\t" \
3810  "mvc 160(8,15), 48(1)\n\t" \
3811  "mvc 168(8,15), 56(1)\n\t" \
3812  "mvc 176(8,15), 64(1)\n\t" \
3813  "mvc 184(8,15), 72(1)\n\t" \
3814  "lg 1, 0(1)\n\t" \
3815  VALGRIND_CALL_NOREDIR_R1 \
3816  "lgr %0, 2\n\t" \
3817  "aghi 15,192\n\t" \
3818  VALGRIND_CFI_EPILOGUE \
3819  : /*out*/ "=d" (_res) \
3820  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3821  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3822  ); \
3823  lval = (__typeof__(lval)) _res; \
3824  } while (0)
3825 
/* Ten arguments: args 6-10 on the stack; frame 160+40 bytes. */
3826 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3827  arg6, arg7 ,arg8, arg9, arg10) \
3828  do { \
3829  volatile OrigFn _orig = (orig); \
3830  volatile unsigned long _argvec[11]; \
3831  volatile unsigned long _res; \
3832  _argvec[0] = (unsigned long)_orig.nraddr; \
3833  _argvec[1] = (unsigned long)arg1; \
3834  _argvec[2] = (unsigned long)arg2; \
3835  _argvec[3] = (unsigned long)arg3; \
3836  _argvec[4] = (unsigned long)arg4; \
3837  _argvec[5] = (unsigned long)arg5; \
3838  _argvec[6] = (unsigned long)arg6; \
3839  _argvec[7] = (unsigned long)arg7; \
3840  _argvec[8] = (unsigned long)arg8; \
3841  _argvec[9] = (unsigned long)arg9; \
3842  _argvec[10] = (unsigned long)arg10; \
3843  __asm__ volatile( \
3844  VALGRIND_CFI_PROLOGUE \
3845  "aghi 15,-200\n\t" \
3846  "lg 2, 8(1)\n\t" \
3847  "lg 3,16(1)\n\t" \
3848  "lg 4,24(1)\n\t" \
3849  "lg 5,32(1)\n\t" \
3850  "lg 6,40(1)\n\t" \
3851  "mvc 160(8,15), 48(1)\n\t" \
3852  "mvc 168(8,15), 56(1)\n\t" \
3853  "mvc 176(8,15), 64(1)\n\t" \
3854  "mvc 184(8,15), 72(1)\n\t" \
3855  "mvc 192(8,15), 80(1)\n\t" \
3856  "lg 1, 0(1)\n\t" \
3857  VALGRIND_CALL_NOREDIR_R1 \
3858  "lgr %0, 2\n\t" \
3859  "aghi 15,200\n\t" \
3860  VALGRIND_CFI_EPILOGUE \
3861  : /*out*/ "=d" (_res) \
3862  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3863  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3864  ); \
3865  lval = (__typeof__(lval)) _res; \
3866  } while (0)
3867 
/* Eleven arguments: args 6-11 on the stack; frame 160+48 bytes. */
3868 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3869  arg6, arg7 ,arg8, arg9, arg10, arg11) \
3870  do { \
3871  volatile OrigFn _orig = (orig); \
3872  volatile unsigned long _argvec[12]; \
3873  volatile unsigned long _res; \
3874  _argvec[0] = (unsigned long)_orig.nraddr; \
3875  _argvec[1] = (unsigned long)arg1; \
3876  _argvec[2] = (unsigned long)arg2; \
3877  _argvec[3] = (unsigned long)arg3; \
3878  _argvec[4] = (unsigned long)arg4; \
3879  _argvec[5] = (unsigned long)arg5; \
3880  _argvec[6] = (unsigned long)arg6; \
3881  _argvec[7] = (unsigned long)arg7; \
3882  _argvec[8] = (unsigned long)arg8; \
3883  _argvec[9] = (unsigned long)arg9; \
3884  _argvec[10] = (unsigned long)arg10; \
3885  _argvec[11] = (unsigned long)arg11; \
3886  __asm__ volatile( \
3887  VALGRIND_CFI_PROLOGUE \
3888  "aghi 15,-208\n\t" \
3889  "lg 2, 8(1)\n\t" \
3890  "lg 3,16(1)\n\t" \
3891  "lg 4,24(1)\n\t" \
3892  "lg 5,32(1)\n\t" \
3893  "lg 6,40(1)\n\t" \
3894  "mvc 160(8,15), 48(1)\n\t" \
3895  "mvc 168(8,15), 56(1)\n\t" \
3896  "mvc 176(8,15), 64(1)\n\t" \
3897  "mvc 184(8,15), 72(1)\n\t" \
3898  "mvc 192(8,15), 80(1)\n\t" \
3899  "mvc 200(8,15), 88(1)\n\t" \
3900  "lg 1, 0(1)\n\t" \
3901  VALGRIND_CALL_NOREDIR_R1 \
3902  "lgr %0, 2\n\t" \
3903  "aghi 15,208\n\t" \
3904  VALGRIND_CFI_EPILOGUE \
3905  : /*out*/ "=d" (_res) \
3906  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3907  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3908  ); \
3909  lval = (__typeof__(lval)) _res; \
3910  } while (0)
3911 
/* Twelve arguments: args 6-12 on the stack; frame 160+56 bytes. */
3912 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3913  arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
3914  do { \
3915  volatile OrigFn _orig = (orig); \
3916  volatile unsigned long _argvec[13]; \
3917  volatile unsigned long _res; \
3918  _argvec[0] = (unsigned long)_orig.nraddr; \
3919  _argvec[1] = (unsigned long)arg1; \
3920  _argvec[2] = (unsigned long)arg2; \
3921  _argvec[3] = (unsigned long)arg3; \
3922  _argvec[4] = (unsigned long)arg4; \
3923  _argvec[5] = (unsigned long)arg5; \
3924  _argvec[6] = (unsigned long)arg6; \
3925  _argvec[7] = (unsigned long)arg7; \
3926  _argvec[8] = (unsigned long)arg8; \
3927  _argvec[9] = (unsigned long)arg9; \
3928  _argvec[10] = (unsigned long)arg10; \
3929  _argvec[11] = (unsigned long)arg11; \
3930  _argvec[12] = (unsigned long)arg12; \
3931  __asm__ volatile( \
3932  VALGRIND_CFI_PROLOGUE \
3933  "aghi 15,-216\n\t" \
3934  "lg 2, 8(1)\n\t" \
3935  "lg 3,16(1)\n\t" \
3936  "lg 4,24(1)\n\t" \
3937  "lg 5,32(1)\n\t" \
3938  "lg 6,40(1)\n\t" \
3939  "mvc 160(8,15), 48(1)\n\t" \
3940  "mvc 168(8,15), 56(1)\n\t" \
3941  "mvc 176(8,15), 64(1)\n\t" \
3942  "mvc 184(8,15), 72(1)\n\t" \
3943  "mvc 192(8,15), 80(1)\n\t" \
3944  "mvc 200(8,15), 88(1)\n\t" \
3945  "mvc 208(8,15), 96(1)\n\t" \
3946  "lg 1, 0(1)\n\t" \
3947  VALGRIND_CALL_NOREDIR_R1 \
3948  "lgr %0, 2\n\t" \
3949  "aghi 15,216\n\t" \
3950  VALGRIND_CFI_EPILOGUE \
3951  : /*out*/ "=d" (_res) \
3952  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3953  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3954  ); \
3955  lval = (__typeof__(lval)) _res; \
3956  } while (0)
3957 
3958 
3959 #endif /* PLAT_s390x_linux */
3960 
3961 /* ------------------------- mips32-linux ----------------------- */
3962 
3963 #if defined(PLAT_mips32_linux)
3964 
3965 /* These regs are trashed by the hidden call. */
/* $2-$15, $24, $25 and $31 (ra): spliced into the clobber list of
   every mips32 CALL_FN_* asm statement below. */
3966 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
3967 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
3968 "$25", "$31"
3969 
3970 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
3971  long) == 4. */
3972 
/* Call a zero-argument function via the non-redirected entry point.
   Saves $28 (gp) and $31 (ra) in an 8-byte scratch area, reserves a
   16-byte outgoing-argument area, loads the target into $25 (t9) and
   calls it via VALGRIND_CALL_NOREDIR_T9; the result is copied from
   $2 (v0). */
3973 #define CALL_FN_W_v(lval, orig) \
3974  do { \
3975  volatile OrigFn _orig = (orig); \
3976  volatile unsigned long _argvec[1]; \
3977  volatile unsigned long _res; \
3978  _argvec[0] = (unsigned long)_orig.nraddr; \
3979  __asm__ volatile( \
3980  "subu $29, $29, 8 \n\t" \
3981  "sw $28, 0($29) \n\t" \
3982  "sw $31, 4($29) \n\t" \
3983  "subu $29, $29, 16 \n\t" \
3984  "lw $25, 0(%1) \n\t" /* target->t9 */ \
3985  VALGRIND_CALL_NOREDIR_T9 \
3986  "addu $29, $29, 16\n\t" \
3987  "lw $28, 0($29) \n\t" \
3988  "lw $31, 4($29) \n\t" \
3989  "addu $29, $29, 8 \n\t" \
3990  "move %0, $2\n" \
3991  : /*out*/ "=r" (_res) \
3992  : /*in*/ "0" (&_argvec[0]) \
3993  : /*trash*/ "memory", __CALLER_SAVED_REGS \
3994  ); \
3995  lval = (__typeof__(lval)) _res; \
3996  } while (0)
3997 
/* One argument: loaded into $4 (a0) from _argvec[1]. */
3998 #define CALL_FN_W_W(lval, orig, arg1) \
3999  do { \
4000  volatile OrigFn _orig = (orig); \
4001  volatile unsigned long _argvec[2]; \
4002  volatile unsigned long _res; \
4003  _argvec[0] = (unsigned long)_orig.nraddr; \
4004  _argvec[1] = (unsigned long)(arg1); \
4005  __asm__ volatile( \
4006  "subu $29, $29, 8 \n\t" \
4007  "sw $28, 0($29) \n\t" \
4008  "sw $31, 4($29) \n\t" \
4009  "subu $29, $29, 16 \n\t" \
4010  "lw $4, 4(%1) \n\t" /* arg1*/ \
4011  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4012  VALGRIND_CALL_NOREDIR_T9 \
4013  "addu $29, $29, 16 \n\t" \
4014  "lw $28, 0($29) \n\t" \
4015  "lw $31, 4($29) \n\t" \
4016  "addu $29, $29, 8 \n\t" \
4017  "move %0, $2\n" \
4018  : /*out*/ "=r" (_res) \
4019  : /*in*/ "0" (&_argvec[0]) \
4020  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4021  ); \
4022  lval = (__typeof__(lval)) _res; \
4023  } while (0)
4024 
/* Two arguments: $4-$5. */
4025 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4026  do { \
4027  volatile OrigFn _orig = (orig); \
4028  volatile unsigned long _argvec[3]; \
4029  volatile unsigned long _res; \
4030  _argvec[0] = (unsigned long)_orig.nraddr; \
4031  _argvec[1] = (unsigned long)(arg1); \
4032  _argvec[2] = (unsigned long)(arg2); \
4033  __asm__ volatile( \
4034  "subu $29, $29, 8 \n\t" \
4035  "sw $28, 0($29) \n\t" \
4036  "sw $31, 4($29) \n\t" \
4037  "subu $29, $29, 16 \n\t" \
4038  "lw $4, 4(%1) \n\t" \
4039  "lw $5, 8(%1) \n\t" \
4040  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4041  VALGRIND_CALL_NOREDIR_T9 \
4042  "addu $29, $29, 16 \n\t" \
4043  "lw $28, 0($29) \n\t" \
4044  "lw $31, 4($29) \n\t" \
4045  "addu $29, $29, 8 \n\t" \
4046  "move %0, $2\n" \
4047  : /*out*/ "=r" (_res) \
4048  : /*in*/ "0" (&_argvec[0]) \
4049  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4050  ); \
4051  lval = (__typeof__(lval)) _res; \
4052  } while (0)
4053 
/* Three arguments: $4-$6. */
4054 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4055  do { \
4056  volatile OrigFn _orig = (orig); \
4057  volatile unsigned long _argvec[4]; \
4058  volatile unsigned long _res; \
4059  _argvec[0] = (unsigned long)_orig.nraddr; \
4060  _argvec[1] = (unsigned long)(arg1); \
4061  _argvec[2] = (unsigned long)(arg2); \
4062  _argvec[3] = (unsigned long)(arg3); \
4063  __asm__ volatile( \
4064  "subu $29, $29, 8 \n\t" \
4065  "sw $28, 0($29) \n\t" \
4066  "sw $31, 4($29) \n\t" \
4067  "subu $29, $29, 16 \n\t" \
4068  "lw $4, 4(%1) \n\t" \
4069  "lw $5, 8(%1) \n\t" \
4070  "lw $6, 12(%1) \n\t" \
4071  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4072  VALGRIND_CALL_NOREDIR_T9 \
4073  "addu $29, $29, 16 \n\t" \
4074  "lw $28, 0($29) \n\t" \
4075  "lw $31, 4($29) \n\t" \
4076  "addu $29, $29, 8 \n\t" \
4077  "move %0, $2\n" \
4078  : /*out*/ "=r" (_res) \
4079  : /*in*/ "0" (&_argvec[0]) \
4080  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4081  ); \
4082  lval = (__typeof__(lval)) _res; \
4083  } while (0)
4084 
/* Four arguments: $4-$7 (the o32 register-argument limit). */
4085 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4086  do { \
4087  volatile OrigFn _orig = (orig); \
4088  volatile unsigned long _argvec[5]; \
4089  volatile unsigned long _res; \
4090  _argvec[0] = (unsigned long)_orig.nraddr; \
4091  _argvec[1] = (unsigned long)(arg1); \
4092  _argvec[2] = (unsigned long)(arg2); \
4093  _argvec[3] = (unsigned long)(arg3); \
4094  _argvec[4] = (unsigned long)(arg4); \
4095  __asm__ volatile( \
4096  "subu $29, $29, 8 \n\t" \
4097  "sw $28, 0($29) \n\t" \
4098  "sw $31, 4($29) \n\t" \
4099  "subu $29, $29, 16 \n\t" \
4100  "lw $4, 4(%1) \n\t" \
4101  "lw $5, 8(%1) \n\t" \
4102  "lw $6, 12(%1) \n\t" \
4103  "lw $7, 16(%1) \n\t" \
4104  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4105  VALGRIND_CALL_NOREDIR_T9 \
4106  "addu $29, $29, 16 \n\t" \
4107  "lw $28, 0($29) \n\t" \
4108  "lw $31, 4($29) \n\t" \
4109  "addu $29, $29, 8 \n\t" \
4110  "move %0, $2\n" \
4111  : /*out*/ "=r" (_res) \
4112  : /*in*/ "0" (&_argvec[0]) \
4113  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4114  ); \
4115  lval = (__typeof__(lval)) _res; \
4116  } while (0)
4117 
/* Five arguments: $4-$7 plus arg5 stored at 16($29) in a 24-byte
   outgoing area ($4 is used as a scratch register for the stack
   store before being reloaded with arg1). */
4118 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4119  do { \
4120  volatile OrigFn _orig = (orig); \
4121  volatile unsigned long _argvec[6]; \
4122  volatile unsigned long _res; \
4123  _argvec[0] = (unsigned long)_orig.nraddr; \
4124  _argvec[1] = (unsigned long)(arg1); \
4125  _argvec[2] = (unsigned long)(arg2); \
4126  _argvec[3] = (unsigned long)(arg3); \
4127  _argvec[4] = (unsigned long)(arg4); \
4128  _argvec[5] = (unsigned long)(arg5); \
4129  __asm__ volatile( \
4130  "subu $29, $29, 8 \n\t" \
4131  "sw $28, 0($29) \n\t" \
4132  "sw $31, 4($29) \n\t" \
4133  "lw $4, 20(%1) \n\t" \
4134  "subu $29, $29, 24\n\t" \
4135  "sw $4, 16($29) \n\t" \
4136  "lw $4, 4(%1) \n\t" \
4137  "lw $5, 8(%1) \n\t" \
4138  "lw $6, 12(%1) \n\t" \
4139  "lw $7, 16(%1) \n\t" \
4140  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4141  VALGRIND_CALL_NOREDIR_T9 \
4142  "addu $29, $29, 24 \n\t" \
4143  "lw $28, 0($29) \n\t" \
4144  "lw $31, 4($29) \n\t" \
4145  "addu $29, $29, 8 \n\t" \
4146  "move %0, $2\n" \
4147  : /*out*/ "=r" (_res) \
4148  : /*in*/ "0" (&_argvec[0]) \
4149  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4150  ); \
4151  lval = (__typeof__(lval)) _res; \
4152  } while (0)
/* Six arguments: $4-$7 plus args 5-6 at 16/20($29) in a 32-byte
   outgoing area.  (The lone "nop" is kept from the original --
   NOTE(review): presumably load-delay scheduling; confirm.) */
4153 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4154  do { \
4155  volatile OrigFn _orig = (orig); \
4156  volatile unsigned long _argvec[7]; \
4157  volatile unsigned long _res; \
4158  _argvec[0] = (unsigned long)_orig.nraddr; \
4159  _argvec[1] = (unsigned long)(arg1); \
4160  _argvec[2] = (unsigned long)(arg2); \
4161  _argvec[3] = (unsigned long)(arg3); \
4162  _argvec[4] = (unsigned long)(arg4); \
4163  _argvec[5] = (unsigned long)(arg5); \
4164  _argvec[6] = (unsigned long)(arg6); \
4165  __asm__ volatile( \
4166  "subu $29, $29, 8 \n\t" \
4167  "sw $28, 0($29) \n\t" \
4168  "sw $31, 4($29) \n\t" \
4169  "lw $4, 20(%1) \n\t" \
4170  "subu $29, $29, 32\n\t" \
4171  "sw $4, 16($29) \n\t" \
4172  "lw $4, 24(%1) \n\t" \
4173  "nop\n\t" \
4174  "sw $4, 20($29) \n\t" \
4175  "lw $4, 4(%1) \n\t" \
4176  "lw $5, 8(%1) \n\t" \
4177  "lw $6, 12(%1) \n\t" \
4178  "lw $7, 16(%1) \n\t" \
4179  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4180  VALGRIND_CALL_NOREDIR_T9 \
4181  "addu $29, $29, 32 \n\t" \
4182  "lw $28, 0($29) \n\t" \
4183  "lw $31, 4($29) \n\t" \
4184  "addu $29, $29, 8 \n\t" \
4185  "move %0, $2\n" \
4186  : /*out*/ "=r" (_res) \
4187  : /*in*/ "0" (&_argvec[0]) \
4188  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4189  ); \
4190  lval = (__typeof__(lval)) _res; \
4191  } while (0)
4192 
/* Seven arguments: $4-$7 plus args 5-7 at 16/20/24($29); 32-byte
   outgoing area. */
4193 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4194  arg7) \
4195  do { \
4196  volatile OrigFn _orig = (orig); \
4197  volatile unsigned long _argvec[8]; \
4198  volatile unsigned long _res; \
4199  _argvec[0] = (unsigned long)_orig.nraddr; \
4200  _argvec[1] = (unsigned long)(arg1); \
4201  _argvec[2] = (unsigned long)(arg2); \
4202  _argvec[3] = (unsigned long)(arg3); \
4203  _argvec[4] = (unsigned long)(arg4); \
4204  _argvec[5] = (unsigned long)(arg5); \
4205  _argvec[6] = (unsigned long)(arg6); \
4206  _argvec[7] = (unsigned long)(arg7); \
4207  __asm__ volatile( \
4208  "subu $29, $29, 8 \n\t" \
4209  "sw $28, 0($29) \n\t" \
4210  "sw $31, 4($29) \n\t" \
4211  "lw $4, 20(%1) \n\t" \
4212  "subu $29, $29, 32\n\t" \
4213  "sw $4, 16($29) \n\t" \
4214  "lw $4, 24(%1) \n\t" \
4215  "sw $4, 20($29) \n\t" \
4216  "lw $4, 28(%1) \n\t" \
4217  "sw $4, 24($29) \n\t" \
4218  "lw $4, 4(%1) \n\t" \
4219  "lw $5, 8(%1) \n\t" \
4220  "lw $6, 12(%1) \n\t" \
4221  "lw $7, 16(%1) \n\t" \
4222  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4223  VALGRIND_CALL_NOREDIR_T9 \
4224  "addu $29, $29, 32 \n\t" \
4225  "lw $28, 0($29) \n\t" \
4226  "lw $31, 4($29) \n\t" \
4227  "addu $29, $29, 8 \n\t" \
4228  "move %0, $2\n" \
4229  : /*out*/ "=r" (_res) \
4230  : /*in*/ "0" (&_argvec[0]) \
4231  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4232  ); \
4233  lval = (__typeof__(lval)) _res; \
4234  } while (0)
4235 
/* Eight arguments: $4-$7 plus args 5-8 on the stack; 40-byte
   outgoing area. */
4236 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4237  arg7,arg8) \
4238  do { \
4239  volatile OrigFn _orig = (orig); \
4240  volatile unsigned long _argvec[9]; \
4241  volatile unsigned long _res; \
4242  _argvec[0] = (unsigned long)_orig.nraddr; \
4243  _argvec[1] = (unsigned long)(arg1); \
4244  _argvec[2] = (unsigned long)(arg2); \
4245  _argvec[3] = (unsigned long)(arg3); \
4246  _argvec[4] = (unsigned long)(arg4); \
4247  _argvec[5] = (unsigned long)(arg5); \
4248  _argvec[6] = (unsigned long)(arg6); \
4249  _argvec[7] = (unsigned long)(arg7); \
4250  _argvec[8] = (unsigned long)(arg8); \
4251  __asm__ volatile( \
4252  "subu $29, $29, 8 \n\t" \
4253  "sw $28, 0($29) \n\t" \
4254  "sw $31, 4($29) \n\t" \
4255  "lw $4, 20(%1) \n\t" \
4256  "subu $29, $29, 40\n\t" \
4257  "sw $4, 16($29) \n\t" \
4258  "lw $4, 24(%1) \n\t" \
4259  "sw $4, 20($29) \n\t" \
4260  "lw $4, 28(%1) \n\t" \
4261  "sw $4, 24($29) \n\t" \
4262  "lw $4, 32(%1) \n\t" \
4263  "sw $4, 28($29) \n\t" \
4264  "lw $4, 4(%1) \n\t" \
4265  "lw $5, 8(%1) \n\t" \
4266  "lw $6, 12(%1) \n\t" \
4267  "lw $7, 16(%1) \n\t" \
4268  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4269  VALGRIND_CALL_NOREDIR_T9 \
4270  "addu $29, $29, 40 \n\t" \
4271  "lw $28, 0($29) \n\t" \
4272  "lw $31, 4($29) \n\t" \
4273  "addu $29, $29, 8 \n\t" \
4274  "move %0, $2\n" \
4275  : /*out*/ "=r" (_res) \
4276  : /*in*/ "0" (&_argvec[0]) \
4277  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4278  ); \
4279  lval = (__typeof__(lval)) _res; \
4280  } while (0)
4281 
/* Nine arguments: $4-$7 plus args 5-9 on the stack; 40-byte
   outgoing area. */
4282 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4283  arg7,arg8,arg9) \
4284  do { \
4285  volatile OrigFn _orig = (orig); \
4286  volatile unsigned long _argvec[10]; \
4287  volatile unsigned long _res; \
4288  _argvec[0] = (unsigned long)_orig.nraddr; \
4289  _argvec[1] = (unsigned long)(arg1); \
4290  _argvec[2] = (unsigned long)(arg2); \
4291  _argvec[3] = (unsigned long)(arg3); \
4292  _argvec[4] = (unsigned long)(arg4); \
4293  _argvec[5] = (unsigned long)(arg5); \
4294  _argvec[6] = (unsigned long)(arg6); \
4295  _argvec[7] = (unsigned long)(arg7); \
4296  _argvec[8] = (unsigned long)(arg8); \
4297  _argvec[9] = (unsigned long)(arg9); \
4298  __asm__ volatile( \
4299  "subu $29, $29, 8 \n\t" \
4300  "sw $28, 0($29) \n\t" \
4301  "sw $31, 4($29) \n\t" \
4302  "lw $4, 20(%1) \n\t" \
4303  "subu $29, $29, 40\n\t" \
4304  "sw $4, 16($29) \n\t" \
4305  "lw $4, 24(%1) \n\t" \
4306  "sw $4, 20($29) \n\t" \
4307  "lw $4, 28(%1) \n\t" \
4308  "sw $4, 24($29) \n\t" \
4309  "lw $4, 32(%1) \n\t" \
4310  "sw $4, 28($29) \n\t" \
4311  "lw $4, 36(%1) \n\t" \
4312  "sw $4, 32($29) \n\t" \
4313  "lw $4, 4(%1) \n\t" \
4314  "lw $5, 8(%1) \n\t" \
4315  "lw $6, 12(%1) \n\t" \
4316  "lw $7, 16(%1) \n\t" \
4317  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4318  VALGRIND_CALL_NOREDIR_T9 \
4319  "addu $29, $29, 40 \n\t" \
4320  "lw $28, 0($29) \n\t" \
4321  "lw $31, 4($29) \n\t" \
4322  "addu $29, $29, 8 \n\t" \
4323  "move %0, $2\n" \
4324  : /*out*/ "=r" (_res) \
4325  : /*in*/ "0" (&_argvec[0]) \
4326  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4327  ); \
4328  lval = (__typeof__(lval)) _res; \
4329  } while (0)
4330 
/* Ten arguments: $4-$7 plus args 5-10 on the stack; 48-byte
   outgoing area. */
4331 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4332  arg7,arg8,arg9,arg10) \
4333  do { \
4334  volatile OrigFn _orig = (orig); \
4335  volatile unsigned long _argvec[11]; \
4336  volatile unsigned long _res; \
4337  _argvec[0] = (unsigned long)_orig.nraddr; \
4338  _argvec[1] = (unsigned long)(arg1); \
4339  _argvec[2] = (unsigned long)(arg2); \
4340  _argvec[3] = (unsigned long)(arg3); \
4341  _argvec[4] = (unsigned long)(arg4); \
4342  _argvec[5] = (unsigned long)(arg5); \
4343  _argvec[6] = (unsigned long)(arg6); \
4344  _argvec[7] = (unsigned long)(arg7); \
4345  _argvec[8] = (unsigned long)(arg8); \
4346  _argvec[9] = (unsigned long)(arg9); \
4347  _argvec[10] = (unsigned long)(arg10); \
4348  __asm__ volatile( \
4349  "subu $29, $29, 8 \n\t" \
4350  "sw $28, 0($29) \n\t" \
4351  "sw $31, 4($29) \n\t" \
4352  "lw $4, 20(%1) \n\t" \
4353  "subu $29, $29, 48\n\t" \
4354  "sw $4, 16($29) \n\t" \
4355  "lw $4, 24(%1) \n\t" \
4356  "sw $4, 20($29) \n\t" \
4357  "lw $4, 28(%1) \n\t" \
4358  "sw $4, 24($29) \n\t" \
4359  "lw $4, 32(%1) \n\t" \
4360  "sw $4, 28($29) \n\t" \
4361  "lw $4, 36(%1) \n\t" \
4362  "sw $4, 32($29) \n\t" \
4363  "lw $4, 40(%1) \n\t" \
4364  "sw $4, 36($29) \n\t" \
4365  "lw $4, 4(%1) \n\t" \
4366  "lw $5, 8(%1) \n\t" \
4367  "lw $6, 12(%1) \n\t" \
4368  "lw $7, 16(%1) \n\t" \
4369  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4370  VALGRIND_CALL_NOREDIR_T9 \
4371  "addu $29, $29, 48 \n\t" \
4372  "lw $28, 0($29) \n\t" \
4373  "lw $31, 4($29) \n\t" \
4374  "addu $29, $29, 8 \n\t" \
4375  "move %0, $2\n" \
4376  : /*out*/ "=r" (_res) \
4377  : /*in*/ "0" (&_argvec[0]) \
4378  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4379  ); \
4380  lval = (__typeof__(lval)) _res; \
4381  } while (0)
4382 
/* Eleven arguments: $4-$7 plus args 5-11 on the stack; 48-byte
   outgoing area. */
4383 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4384  arg6,arg7,arg8,arg9,arg10, \
4385  arg11) \
4386  do { \
4387  volatile OrigFn _orig = (orig); \
4388  volatile unsigned long _argvec[12]; \
4389  volatile unsigned long _res; \
4390  _argvec[0] = (unsigned long)_orig.nraddr; \
4391  _argvec[1] = (unsigned long)(arg1); \
4392  _argvec[2] = (unsigned long)(arg2); \
4393  _argvec[3] = (unsigned long)(arg3); \
4394  _argvec[4] = (unsigned long)(arg4); \
4395  _argvec[5] = (unsigned long)(arg5); \
4396  _argvec[6] = (unsigned long)(arg6); \
4397  _argvec[7] = (unsigned long)(arg7); \
4398  _argvec[8] = (unsigned long)(arg8); \
4399  _argvec[9] = (unsigned long)(arg9); \
4400  _argvec[10] = (unsigned long)(arg10); \
4401  _argvec[11] = (unsigned long)(arg11); \
4402  __asm__ volatile( \
4403  "subu $29, $29, 8 \n\t" \
4404  "sw $28, 0($29) \n\t" \
4405  "sw $31, 4($29) \n\t" \
4406  "lw $4, 20(%1) \n\t" \
4407  "subu $29, $29, 48\n\t" \
4408  "sw $4, 16($29) \n\t" \
4409  "lw $4, 24(%1) \n\t" \
4410  "sw $4, 20($29) \n\t" \
4411  "lw $4, 28(%1) \n\t" \
4412  "sw $4, 24($29) \n\t" \
4413  "lw $4, 32(%1) \n\t" \
4414  "sw $4, 28($29) \n\t" \
4415  "lw $4, 36(%1) \n\t" \
4416  "sw $4, 32($29) \n\t" \
4417  "lw $4, 40(%1) \n\t" \
4418  "sw $4, 36($29) \n\t" \
4419  "lw $4, 44(%1) \n\t" \
4420  "sw $4, 40($29) \n\t" \
4421  "lw $4, 4(%1) \n\t" \
4422  "lw $5, 8(%1) \n\t" \
4423  "lw $6, 12(%1) \n\t" \
4424  "lw $7, 16(%1) \n\t" \
4425  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4426  VALGRIND_CALL_NOREDIR_T9 \
4427  "addu $29, $29, 48 \n\t" \
4428  "lw $28, 0($29) \n\t" \
4429  "lw $31, 4($29) \n\t" \
4430  "addu $29, $29, 8 \n\t" \
4431  "move %0, $2\n" \
4432  : /*out*/ "=r" (_res) \
4433  : /*in*/ "0" (&_argvec[0]) \
4434  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4435  ); \
4436  lval = (__typeof__(lval)) _res; \
4437  } while (0)
4438 
/* Twelve arguments: $4-$7 plus args 5-12 on the stack; 56-byte
   outgoing area.  NOTE(review): uses the "r" input constraint while
   every sibling macro above uses the matching constraint "0" --
   behavior is equivalent here, but the inconsistency looks
   accidental; confirm against upstream. */
4439 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4440  arg6,arg7,arg8,arg9,arg10, \
4441  arg11,arg12) \
4442  do { \
4443  volatile OrigFn _orig = (orig); \
4444  volatile unsigned long _argvec[13]; \
4445  volatile unsigned long _res; \
4446  _argvec[0] = (unsigned long)_orig.nraddr; \
4447  _argvec[1] = (unsigned long)(arg1); \
4448  _argvec[2] = (unsigned long)(arg2); \
4449  _argvec[3] = (unsigned long)(arg3); \
4450  _argvec[4] = (unsigned long)(arg4); \
4451  _argvec[5] = (unsigned long)(arg5); \
4452  _argvec[6] = (unsigned long)(arg6); \
4453  _argvec[7] = (unsigned long)(arg7); \
4454  _argvec[8] = (unsigned long)(arg8); \
4455  _argvec[9] = (unsigned long)(arg9); \
4456  _argvec[10] = (unsigned long)(arg10); \
4457  _argvec[11] = (unsigned long)(arg11); \
4458  _argvec[12] = (unsigned long)(arg12); \
4459  __asm__ volatile( \
4460  "subu $29, $29, 8 \n\t" \
4461  "sw $28, 0($29) \n\t" \
4462  "sw $31, 4($29) \n\t" \
4463  "lw $4, 20(%1) \n\t" \
4464  "subu $29, $29, 56\n\t" \
4465  "sw $4, 16($29) \n\t" \
4466  "lw $4, 24(%1) \n\t" \
4467  "sw $4, 20($29) \n\t" \
4468  "lw $4, 28(%1) \n\t" \
4469  "sw $4, 24($29) \n\t" \
4470  "lw $4, 32(%1) \n\t" \
4471  "sw $4, 28($29) \n\t" \
4472  "lw $4, 36(%1) \n\t" \
4473  "sw $4, 32($29) \n\t" \
4474  "lw $4, 40(%1) \n\t" \
4475  "sw $4, 36($29) \n\t" \
4476  "lw $4, 44(%1) \n\t" \
4477  "sw $4, 40($29) \n\t" \
4478  "lw $4, 48(%1) \n\t" \
4479  "sw $4, 44($29) \n\t" \
4480  "lw $4, 4(%1) \n\t" \
4481  "lw $5, 8(%1) \n\t" \
4482  "lw $6, 12(%1) \n\t" \
4483  "lw $7, 16(%1) \n\t" \
4484  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4485  VALGRIND_CALL_NOREDIR_T9 \
4486  "addu $29, $29, 56 \n\t" \
4487  "lw $28, 0($29) \n\t" \
4488  "lw $31, 4($29) \n\t" \
4489  "addu $29, $29, 8 \n\t" \
4490  "move %0, $2\n" \
4491  : /*out*/ "=r" (_res) \
4492  : /*in*/ "r" (&_argvec[0]) \
4493  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4494  ); \
4495  lval = (__typeof__(lval)) _res; \
4496  } while (0)
4497 
4498 #endif /* PLAT_mips32_linux */
4499 
4500 /* ------------------------- mips64-linux ------------------------- */
4501 
4502 #if defined(PLAT_mips64_linux)
4503 
4504 /* These regs are trashed by the hidden call. */
/* Same caller-saved set as the mips32 section above; spliced into
   the clobber list of every mips64 CALL_FN_* asm statement below. */
4505 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
4506 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
4507 "$25", "$31"
4508 
4509 /* These CALL_FN_ macros assume that on mips64-linux, sizeof(unsigned
4510  long) == 8.  (The macros below use 64-bit "ld" loads and 8-byte
 argvec slots.) */
4511 
/* Call a 0-argument function on the real CPU, avoiding Valgrind's
   function redirection.  The target address (_orig.nraddr) is loaded
   into $25 (t9), as the MIPS PIC calling convention expects, and the
   result is read back from $2 (v0).
   NOTE(review): the input uses the "0" matching constraint (sharing
   the output's register) whereas every sibling macro below uses "r";
   confirm this asymmetry is intentional. */
4512 #define CALL_FN_W_v(lval, orig) \
4513  do { \
4514  volatile OrigFn _orig = (orig); \
4515  volatile unsigned long _argvec[1]; \
4516  volatile unsigned long _res; \
4517  _argvec[0] = (unsigned long)_orig.nraddr; \
4518  __asm__ volatile( \
4519  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4520  VALGRIND_CALL_NOREDIR_T9 \
4521  "move %0, $2\n" \
4522  : /*out*/ "=r" (_res) \
4523  : /*in*/ "0" (&_argvec[0]) \
4524  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4525  ); \
4526  lval = (__typeof__(lval)) _res; \
4527  } while (0)
4528 
/* Call a 1-argument function: arg1 is passed in $4 (a0), the target
   address in $25 (t9), and the word-sized result comes back in $2
   (v0).  Arguments are staged through _argvec so the asm only needs
   one input operand (its base address). */
4529 #define CALL_FN_W_W(lval, orig, arg1) \
4530  do { \
4531  volatile OrigFn _orig = (orig); \
4532  volatile unsigned long _argvec[2]; \
4533  volatile unsigned long _res; \
4534  _argvec[0] = (unsigned long)_orig.nraddr; \
4535  _argvec[1] = (unsigned long)(arg1); \
4536  __asm__ volatile( \
4537  "ld $4, 8(%1)\n\t" /* arg1*/ \
4538  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4539  VALGRIND_CALL_NOREDIR_T9 \
4540  "move %0, $2\n" \
4541  : /*out*/ "=r" (_res) \
4542  : /*in*/ "r" (&_argvec[0]) \
4543  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4544  ); \
4545  lval = (__typeof__(lval)) _res; \
4546  } while (0)
4547 
/* Call a 2-argument function: args in $4-$5 (a0-a1), target in $25
   (t9), result from $2 (v0). */
4548 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4549  do { \
4550  volatile OrigFn _orig = (orig); \
4551  volatile unsigned long _argvec[3]; \
4552  volatile unsigned long _res; \
4553  _argvec[0] = (unsigned long)_orig.nraddr; \
4554  _argvec[1] = (unsigned long)(arg1); \
4555  _argvec[2] = (unsigned long)(arg2); \
4556  __asm__ volatile( \
4557  "ld $4, 8(%1)\n\t" \
4558  "ld $5, 16(%1)\n\t" \
4559  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4560  VALGRIND_CALL_NOREDIR_T9 \
4561  "move %0, $2\n" \
4562  : /*out*/ "=r" (_res) \
4563  : /*in*/ "r" (&_argvec[0]) \
4564  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4565  ); \
4566  lval = (__typeof__(lval)) _res; \
4567  } while (0)
4568 
/* Call a 3-argument function: args in $4-$6 (a0-a2), target in $25
   (t9), result from $2 (v0). */
4569 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4570  do { \
4571  volatile OrigFn _orig = (orig); \
4572  volatile unsigned long _argvec[4]; \
4573  volatile unsigned long _res; \
4574  _argvec[0] = (unsigned long)_orig.nraddr; \
4575  _argvec[1] = (unsigned long)(arg1); \
4576  _argvec[2] = (unsigned long)(arg2); \
4577  _argvec[3] = (unsigned long)(arg3); \
4578  __asm__ volatile( \
4579  "ld $4, 8(%1)\n\t" \
4580  "ld $5, 16(%1)\n\t" \
4581  "ld $6, 24(%1)\n\t" \
4582  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4583  VALGRIND_CALL_NOREDIR_T9 \
4584  "move %0, $2\n" \
4585  : /*out*/ "=r" (_res) \
4586  : /*in*/ "r" (&_argvec[0]) \
4587  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4588  ); \
4589  lval = (__typeof__(lval)) _res; \
4590  } while (0)
4591 
/* Call a 4-argument function: args in $4-$7 (a0-a3), target in $25
   (t9), result from $2 (v0). */
4592 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4593  do { \
4594  volatile OrigFn _orig = (orig); \
4595  volatile unsigned long _argvec[5]; \
4596  volatile unsigned long _res; \
4597  _argvec[0] = (unsigned long)_orig.nraddr; \
4598  _argvec[1] = (unsigned long)(arg1); \
4599  _argvec[2] = (unsigned long)(arg2); \
4600  _argvec[3] = (unsigned long)(arg3); \
4601  _argvec[4] = (unsigned long)(arg4); \
4602  __asm__ volatile( \
4603  "ld $4, 8(%1)\n\t" \
4604  "ld $5, 16(%1)\n\t" \
4605  "ld $6, 24(%1)\n\t" \
4606  "ld $7, 32(%1)\n\t" \
4607  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4608  VALGRIND_CALL_NOREDIR_T9 \
4609  "move %0, $2\n" \
4610  : /*out*/ "=r" (_res) \
4611  : /*in*/ "r" (&_argvec[0]) \
4612  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4613  ); \
4614  lval = (__typeof__(lval)) _res; \
4615  } while (0)
4616 
/* Call a 5-argument function: args in $4-$8 (a0-a4), target in $25
   (t9), result from $2 (v0). */
4617 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4618  do { \
4619  volatile OrigFn _orig = (orig); \
4620  volatile unsigned long _argvec[6]; \
4621  volatile unsigned long _res; \
4622  _argvec[0] = (unsigned long)_orig.nraddr; \
4623  _argvec[1] = (unsigned long)(arg1); \
4624  _argvec[2] = (unsigned long)(arg2); \
4625  _argvec[3] = (unsigned long)(arg3); \
4626  _argvec[4] = (unsigned long)(arg4); \
4627  _argvec[5] = (unsigned long)(arg5); \
4628  __asm__ volatile( \
4629  "ld $4, 8(%1)\n\t" \
4630  "ld $5, 16(%1)\n\t" \
4631  "ld $6, 24(%1)\n\t" \
4632  "ld $7, 32(%1)\n\t" \
4633  "ld $8, 40(%1)\n\t" \
4634  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4635  VALGRIND_CALL_NOREDIR_T9 \
4636  "move %0, $2\n" \
4637  : /*out*/ "=r" (_res) \
4638  : /*in*/ "r" (&_argvec[0]) \
4639  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4640  ); \
4641  lval = (__typeof__(lval)) _res; \
4642  } while (0)
4643 
/* Call a 6-argument function: args in $4-$9 (a0-a5), target in $25
   (t9), result from $2 (v0). */
4644 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4645  do { \
4646  volatile OrigFn _orig = (orig); \
4647  volatile unsigned long _argvec[7]; \
4648  volatile unsigned long _res; \
4649  _argvec[0] = (unsigned long)_orig.nraddr; \
4650  _argvec[1] = (unsigned long)(arg1); \
4651  _argvec[2] = (unsigned long)(arg2); \
4652  _argvec[3] = (unsigned long)(arg3); \
4653  _argvec[4] = (unsigned long)(arg4); \
4654  _argvec[5] = (unsigned long)(arg5); \
4655  _argvec[6] = (unsigned long)(arg6); \
4656  __asm__ volatile( \
4657  "ld $4, 8(%1)\n\t" \
4658  "ld $5, 16(%1)\n\t" \
4659  "ld $6, 24(%1)\n\t" \
4660  "ld $7, 32(%1)\n\t" \
4661  "ld $8, 40(%1)\n\t" \
4662  "ld $9, 48(%1)\n\t" \
4663  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4664  VALGRIND_CALL_NOREDIR_T9 \
4665  "move %0, $2\n" \
4666  : /*out*/ "=r" (_res) \
4667  : /*in*/ "r" (&_argvec[0]) \
4668  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4669  ); \
4670  lval = (__typeof__(lval)) _res; \
4671  } while (0)
4672 
/* Call a 7-argument function: args in $4-$10 (a0-a6), target in $25
   (t9), result from $2 (v0). */
4673 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4674  arg7) \
4675  do { \
4676  volatile OrigFn _orig = (orig); \
4677  volatile unsigned long _argvec[8]; \
4678  volatile unsigned long _res; \
4679  _argvec[0] = (unsigned long)_orig.nraddr; \
4680  _argvec[1] = (unsigned long)(arg1); \
4681  _argvec[2] = (unsigned long)(arg2); \
4682  _argvec[3] = (unsigned long)(arg3); \
4683  _argvec[4] = (unsigned long)(arg4); \
4684  _argvec[5] = (unsigned long)(arg5); \
4685  _argvec[6] = (unsigned long)(arg6); \
4686  _argvec[7] = (unsigned long)(arg7); \
4687  __asm__ volatile( \
4688  "ld $4, 8(%1)\n\t" \
4689  "ld $5, 16(%1)\n\t" \
4690  "ld $6, 24(%1)\n\t" \
4691  "ld $7, 32(%1)\n\t" \
4692  "ld $8, 40(%1)\n\t" \
4693  "ld $9, 48(%1)\n\t" \
4694  "ld $10, 56(%1)\n\t" \
4695  "ld $25, 0(%1) \n\t" /* target->t9 */ \
4696  VALGRIND_CALL_NOREDIR_T9 \
4697  "move %0, $2\n" \
4698  : /*out*/ "=r" (_res) \
4699  : /*in*/ "r" (&_argvec[0]) \
4700  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4701  ); \
4702  lval = (__typeof__(lval)) _res; \
4703  } while (0)
4704 
/* Call an 8-argument function: args in $4-$11 (a0-a7, the full n64
   register-argument set), target in $25 (t9), result from $2 (v0). */
4705 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4706  arg7,arg8) \
4707  do { \
4708  volatile OrigFn _orig = (orig); \
4709  volatile unsigned long _argvec[9]; \
4710  volatile unsigned long _res; \
4711  _argvec[0] = (unsigned long)_orig.nraddr; \
4712  _argvec[1] = (unsigned long)(arg1); \
4713  _argvec[2] = (unsigned long)(arg2); \
4714  _argvec[3] = (unsigned long)(arg3); \
4715  _argvec[4] = (unsigned long)(arg4); \
4716  _argvec[5] = (unsigned long)(arg5); \
4717  _argvec[6] = (unsigned long)(arg6); \
4718  _argvec[7] = (unsigned long)(arg7); \
4719  _argvec[8] = (unsigned long)(arg8); \
4720  __asm__ volatile( \
4721  "ld $4, 8(%1)\n\t" \
4722  "ld $5, 16(%1)\n\t" \
4723  "ld $6, 24(%1)\n\t" \
4724  "ld $7, 32(%1)\n\t" \
4725  "ld $8, 40(%1)\n\t" \
4726  "ld $9, 48(%1)\n\t" \
4727  "ld $10, 56(%1)\n\t" \
4728  "ld $11, 64(%1)\n\t" \
4729  "ld $25, 0(%1) \n\t" /* target->t9 */ \
4730  VALGRIND_CALL_NOREDIR_T9 \
4731  "move %0, $2\n" \
4732  : /*out*/ "=r" (_res) \
4733  : /*in*/ "r" (&_argvec[0]) \
4734  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4735  ); \
4736  lval = (__typeof__(lval)) _res; \
4737  } while (0)
4738 
/* Call a 9-argument function: args 1-8 in $4-$11, arg9 spilled to an
   8-byte stack slot carved below $29 (sp) and released after the
   call.  NOTE(review): the n64 ABI requires 16-byte stack alignment;
   an 8-byte adjustment leaves $29 only 8-byte aligned across the
   call -- confirm this is acceptable for the callees involved. */
4739 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4740  arg7,arg8,arg9) \
4741  do { \
4742  volatile OrigFn _orig = (orig); \
4743  volatile unsigned long _argvec[10]; \
4744  volatile unsigned long _res; \
4745  _argvec[0] = (unsigned long)_orig.nraddr; \
4746  _argvec[1] = (unsigned long)(arg1); \
4747  _argvec[2] = (unsigned long)(arg2); \
4748  _argvec[3] = (unsigned long)(arg3); \
4749  _argvec[4] = (unsigned long)(arg4); \
4750  _argvec[5] = (unsigned long)(arg5); \
4751  _argvec[6] = (unsigned long)(arg6); \
4752  _argvec[7] = (unsigned long)(arg7); \
4753  _argvec[8] = (unsigned long)(arg8); \
4754  _argvec[9] = (unsigned long)(arg9); \
4755  __asm__ volatile( \
4756  "dsubu $29, $29, 8\n\t" \
4757  "ld $4, 72(%1)\n\t" \
4758  "sd $4, 0($29)\n\t" \
4759  "ld $4, 8(%1)\n\t" \
4760  "ld $5, 16(%1)\n\t" \
4761  "ld $6, 24(%1)\n\t" \
4762  "ld $7, 32(%1)\n\t" \
4763  "ld $8, 40(%1)\n\t" \
4764  "ld $9, 48(%1)\n\t" \
4765  "ld $10, 56(%1)\n\t" \
4766  "ld $11, 64(%1)\n\t" \
4767  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4768  VALGRIND_CALL_NOREDIR_T9 \
4769  "daddu $29, $29, 8\n\t" \
4770  "move %0, $2\n" \
4771  : /*out*/ "=r" (_res) \
4772  : /*in*/ "r" (&_argvec[0]) \
4773  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4774  ); \
4775  lval = (__typeof__(lval)) _res; \
4776  } while (0)
4777 
/* Call a 10-argument function: args 1-8 in $4-$11, args 9-10 spilled
   to a 16-byte stack area below $29 (sp), released after the call. */
4778 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4779  arg7,arg8,arg9,arg10) \
4780  do { \
4781  volatile OrigFn _orig = (orig); \
4782  volatile unsigned long _argvec[11]; \
4783  volatile unsigned long _res; \
4784  _argvec[0] = (unsigned long)_orig.nraddr; \
4785  _argvec[1] = (unsigned long)(arg1); \
4786  _argvec[2] = (unsigned long)(arg2); \
4787  _argvec[3] = (unsigned long)(arg3); \
4788  _argvec[4] = (unsigned long)(arg4); \
4789  _argvec[5] = (unsigned long)(arg5); \
4790  _argvec[6] = (unsigned long)(arg6); \
4791  _argvec[7] = (unsigned long)(arg7); \
4792  _argvec[8] = (unsigned long)(arg8); \
4793  _argvec[9] = (unsigned long)(arg9); \
4794  _argvec[10] = (unsigned long)(arg10); \
4795  __asm__ volatile( \
4796  "dsubu $29, $29, 16\n\t" \
4797  "ld $4, 72(%1)\n\t" \
4798  "sd $4, 0($29)\n\t" \
4799  "ld $4, 80(%1)\n\t" \
4800  "sd $4, 8($29)\n\t" \
4801  "ld $4, 8(%1)\n\t" \
4802  "ld $5, 16(%1)\n\t" \
4803  "ld $6, 24(%1)\n\t" \
4804  "ld $7, 32(%1)\n\t" \
4805  "ld $8, 40(%1)\n\t" \
4806  "ld $9, 48(%1)\n\t" \
4807  "ld $10, 56(%1)\n\t" \
4808  "ld $11, 64(%1)\n\t" \
4809  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4810  VALGRIND_CALL_NOREDIR_T9 \
4811  "daddu $29, $29, 16\n\t" \
4812  "move %0, $2\n" \
4813  : /*out*/ "=r" (_res) \
4814  : /*in*/ "r" (&_argvec[0]) \
4815  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4816  ); \
4817  lval = (__typeof__(lval)) _res; \
4818  } while (0)
4819 
/* Call an 11-argument function: args 1-8 in $4-$11, args 9-11 spilled
   to a 24-byte stack area below $29 (sp), released after the call.
   NOTE(review): like CALL_FN_W_9W, the 24-byte adjustment leaves $29
   only 8-byte aligned; n64 expects 16-byte alignment -- confirm. */
4820 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4821  arg6,arg7,arg8,arg9,arg10, \
4822  arg11) \
4823  do { \
4824  volatile OrigFn _orig = (orig); \
4825  volatile unsigned long _argvec[12]; \
4826  volatile unsigned long _res; \
4827  _argvec[0] = (unsigned long)_orig.nraddr; \
4828  _argvec[1] = (unsigned long)(arg1); \
4829  _argvec[2] = (unsigned long)(arg2); \
4830  _argvec[3] = (unsigned long)(arg3); \
4831  _argvec[4] = (unsigned long)(arg4); \
4832  _argvec[5] = (unsigned long)(arg5); \
4833  _argvec[6] = (unsigned long)(arg6); \
4834  _argvec[7] = (unsigned long)(arg7); \
4835  _argvec[8] = (unsigned long)(arg8); \
4836  _argvec[9] = (unsigned long)(arg9); \
4837  _argvec[10] = (unsigned long)(arg10); \
4838  _argvec[11] = (unsigned long)(arg11); \
4839  __asm__ volatile( \
4840  "dsubu $29, $29, 24\n\t" \
4841  "ld $4, 72(%1)\n\t" \
4842  "sd $4, 0($29)\n\t" \
4843  "ld $4, 80(%1)\n\t" \
4844  "sd $4, 8($29)\n\t" \
4845  "ld $4, 88(%1)\n\t" \
4846  "sd $4, 16($29)\n\t" \
4847  "ld $4, 8(%1)\n\t" \
4848  "ld $5, 16(%1)\n\t" \
4849  "ld $6, 24(%1)\n\t" \
4850  "ld $7, 32(%1)\n\t" \
4851  "ld $8, 40(%1)\n\t" \
4852  "ld $9, 48(%1)\n\t" \
4853  "ld $10, 56(%1)\n\t" \
4854  "ld $11, 64(%1)\n\t" \
4855  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4856  VALGRIND_CALL_NOREDIR_T9 \
4857  "daddu $29, $29, 24\n\t" \
4858  "move %0, $2\n" \
4859  : /*out*/ "=r" (_res) \
4860  : /*in*/ "r" (&_argvec[0]) \
4861  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4862  ); \
4863  lval = (__typeof__(lval)) _res; \
4864  } while (0)
4865 
/* Call a 12-argument function: args 1-8 in $4-$11, args 9-12 spilled
   to a 32-byte stack area below $29 (sp), released after the call. */
4866 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4867  arg6,arg7,arg8,arg9,arg10, \
4868  arg11,arg12) \
4869  do { \
4870  volatile OrigFn _orig = (orig); \
4871  volatile unsigned long _argvec[13]; \
4872  volatile unsigned long _res; \
4873  _argvec[0] = (unsigned long)_orig.nraddr; \
4874  _argvec[1] = (unsigned long)(arg1); \
4875  _argvec[2] = (unsigned long)(arg2); \
4876  _argvec[3] = (unsigned long)(arg3); \
4877  _argvec[4] = (unsigned long)(arg4); \
4878  _argvec[5] = (unsigned long)(arg5); \
4879  _argvec[6] = (unsigned long)(arg6); \
4880  _argvec[7] = (unsigned long)(arg7); \
4881  _argvec[8] = (unsigned long)(arg8); \
4882  _argvec[9] = (unsigned long)(arg9); \
4883  _argvec[10] = (unsigned long)(arg10); \
4884  _argvec[11] = (unsigned long)(arg11); \
4885  _argvec[12] = (unsigned long)(arg12); \
4886  __asm__ volatile( \
4887  "dsubu $29, $29, 32\n\t" \
4888  "ld $4, 72(%1)\n\t" \
4889  "sd $4, 0($29)\n\t" \
4890  "ld $4, 80(%1)\n\t" \
4891  "sd $4, 8($29)\n\t" \
4892  "ld $4, 88(%1)\n\t" \
4893  "sd $4, 16($29)\n\t" \
4894  "ld $4, 96(%1)\n\t" \
4895  "sd $4, 24($29)\n\t" \
4896  "ld $4, 8(%1)\n\t" \
4897  "ld $5, 16(%1)\n\t" \
4898  "ld $6, 24(%1)\n\t" \
4899  "ld $7, 32(%1)\n\t" \
4900  "ld $8, 40(%1)\n\t" \
4901  "ld $9, 48(%1)\n\t" \
4902  "ld $10, 56(%1)\n\t" \
4903  "ld $11, 64(%1)\n\t" \
4904  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4905  VALGRIND_CALL_NOREDIR_T9 \
4906  "daddu $29, $29, 32\n\t" \
4907  "move %0, $2\n" \
4908  : /*out*/ "=r" (_res) \
4909  : /*in*/ "r" (&_argvec[0]) \
4910  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4911  ); \
4912  lval = (__typeof__(lval)) _res; \
4913  } while (0)
4914 
4915 #endif /* PLAT_mips64_linux */
4916 
4917 
4918 /* ------------------------------------------------------------------ */
4919 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
4920 /* */
4921 /* ------------------------------------------------------------------ */
4922 
4923 /* Some request codes. There are many more of these, but most are not
4924  exposed to end-user view. These are the public ones, all of the
4925  form 0x1000 + small_number.
4926 
4927  Core ones are in the range 0x00000000--0x0000ffff. The non-public
4928  ones start at 0x2000.
4929 */
4930 
4931 /* These macros are used by tools -- they must be public, but don't
4932  embed them into other programs. */
/* Packs a tool's two-character code (a,b) into the top 16 bits of a
   request word, e.g. ('M','C') for Memcheck. */
4933 #define VG_USERREQ_TOOL_BASE(a,b) \
4934  ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
/* True iff request code v's top 16 bits match tool code (a,b); the
   low 16 bits (the per-tool request number) are masked off. */
4935 #define VG_IS_TOOL_USERREQ(a, b, v) \
4936  (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
4937 
4938 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
4939  This enum comprises an ABI exported by Valgrind to programs
4940  which use client requests. DO NOT CHANGE THE ORDER OF THESE
4941  ENTRIES, NOR DELETE ANY -- add new ones at the end. */
/* NOTE(review): the enum's opening "enum {" and every VG_USERREQ__*
   enumerator line were elided by the documentation extraction (note
   the gaps in the embedded line numbers, e.g. 4942 -> 4945).  Only
   the interleaved explanatory comments survive.  This block is not
   compilable as shown; recover the full enum from the original
   valgrind.h before use. */
4942 typedef
4945 
4946  /* These allow any function to be called from the simulated
4947  CPU but run on the real CPU. Nb: the first arg passed to
4948  the function is always the ThreadId of the running
4949  thread! So CLIENT_CALL0 actually requires a 1 arg
4950  function, etc. */
4955 
4956  /* Can be useful in regression testing suites -- eg. can
4957  send Valgrind's output to /dev/null and still count
4958  errors. */
4960 
4961  /* Allows the client program and/or gdbserver to execute a monitor
4962  command. */
4964 
4965  /* These are useful and can be interpreted by any tool that
4966  tracks malloc() et al, by using vg_replace_malloc.c. */
4970  /* Memory pool support. */
4979 
4980  /* Allow printfs to valgrind log. */
4981  /* The first two pass the va_list argument by value, which
4982  assumes it is the same size as or smaller than a UWord,
4983  which generally isn't the case. Hence are deprecated.
4984  The second two pass the vargs by reference and so are
4985  immune to this problem. */
4986  /* both :: char* fmt, va_list vargs (DEPRECATED) */
4989  /* both :: char* fmt, va_list* vargs */
4992 
4993  /* Stack support. */
4997 
4998  /* Wine support */
5000 
5001  /* Querying of debug info. */
5003 
5004  /* Disable/enable error reporting level. Takes a single
5005  Word arg which is the delta to this thread's error
5006  disablement indicator. Hence 1 disables or further
5007  disables errors, and -1 moves back towards enablement.
5008  Other values are not allowed. */
5010 
5011  /* Initialise IR injection */
5013  } Vg_ClientRequest;
5014 
5015 #if !defined(__GNUC__)
5016 # define __extension__ /* */
5017 #endif
5018 
5019 
5020 /* Returns the number of Valgrinds this code is running under. That
5021  is, 0 if running natively, 1 if running under Valgrind, 2 if
5022  running under Valgrind which is running under another Valgrind,
5023  etc. */
/* NOTE(review): the final line below ends with a stray trailing
   backslash, so the macro silently absorbs the next (blank) line.
   Harmless as long as a blank line follows, but fragile -- confirm
   against upstream and consider removing it. */
5024 #define RUNNING_ON_VALGRIND \
5025  (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */, \
5026  VG_USERREQ__RUNNING_ON_VALGRIND, \
5027  0, 0, 0, 0, 0) \
5028 
5029 
5030 /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
5031  _qzz_len - 1]. Useful if you are debugging a JITter or some such,
5032  since it provides a way to make sure valgrind will retranslate the
5033  invalidated area. Returns no value. */
5034 #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
5035  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
5036  _qzz_addr, _qzz_len, 0, 0, 0)
5037 
5038 
5039 /* These requests are for getting Valgrind itself to print something.
5040  Possibly with a backtrace. This is a really ugly hack. The return value
5041  is the number of characters printed, excluding the "**<pid>** " part at the
5042  start and the backtrace (if present). */
5043 
/* printf-style output to the Valgrind log via a client request;
   returns the number of characters printed (0 when built with
   NVALGRIND or when running natively).  The va_list is passed by
   reference (address of vargs), per the PRINTF_VALIST_BY_REF
   convention described in the Vg_ClientRequest comments above.
   NOTE(review): the request-code argument of each
   VALGRIND_DO_CLIENT_REQUEST_EXPR call below was elided by the
   documentation extraction (embedded line 5068/5074 is missing);
   the original passes VG_USERREQ__PRINTF_VALIST_BY_REF there.
   NOTE(review): in the #if below, && binds tighter than ||, so it
   reads as __GNUC__ || (__INTEL_COMPILER && !_MSC_VER); the intent
   was probably (__GNUC__ || __INTEL_COMPILER) && !_MSC_VER --
   confirm against upstream. */
5044 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
5045 /* Modern GCC will optimize the static routine out if unused,
5046  and unused attribute will shut down warnings about it. */
5047 static int VALGRIND_PRINTF(const char *format, ...)
5048  __attribute__((format(__printf__, 1, 2), __unused__));
5049 #endif
5050 static int
5051 #if defined(_MSC_VER)
5052 __inline
5053 #endif
5054 VALGRIND_PRINTF(const char *format, ...)
5055 {
5056 #if defined(NVALGRIND)
5057  return 0;
5058 #else /* NVALGRIND */
5059 #if defined(_MSC_VER) || defined(__MINGW64__)
5060  uintptr_t _qzz_res;
5061 #else
5062  unsigned long _qzz_res;
5063 #endif
5064  va_list vargs;
5065  va_start(vargs, format);
5066 #if defined(_MSC_VER) || defined(__MINGW64__)
5067  _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5069  (uintptr_t)format,
5070  (uintptr_t)&vargs,
5071  0, 0, 0);
5072 #else
5073  _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5075  (unsigned long)format,
5076  (unsigned long)&vargs,
5077  0, 0, 0);
5078 #endif
5079  va_end(vargs);
5080  return (int)_qzz_res;
5081 #endif /* NVALGRIND */
5082 }
5083 
/* Same as VALGRIND_PRINTF, but Valgrind additionally appends a stack
   backtrace to the output.  Returns the number of characters printed
   (0 under NVALGRIND or when running natively).
   NOTE(review): the request-code argument of each
   VALGRIND_DO_CLIENT_REQUEST_EXPR call below was elided by the
   documentation extraction (embedded line 5106/5112 is missing);
   the original passes VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF.
   NOTE(review): same #if operator-precedence concern as noted on
   VALGRIND_PRINTF above. */
5084 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
5085 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
5086  __attribute__((format(__printf__, 1, 2), __unused__));
5087 #endif
5088 static int
5089 #if defined(_MSC_VER)
5090 __inline
5091 #endif
5092 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
5093 {
5094 #if defined(NVALGRIND)
5095  return 0;
5096 #else /* NVALGRIND */
5097 #if defined(_MSC_VER) || defined(__MINGW64__)
5098  uintptr_t _qzz_res;
5099 #else
5100  unsigned long _qzz_res;
5101 #endif
5102  va_list vargs;
5103  va_start(vargs, format);
5104 #if defined(_MSC_VER) || defined(__MINGW64__)
5105  _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5107  (uintptr_t)format,
5108  (uintptr_t)&vargs,
5109  0, 0, 0);
5110 #else
5111  _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5113  (unsigned long)format,
5114  (unsigned long)&vargs,
5115  0, 0, 0);
5116 #endif
5117  va_end(vargs);
5118  return (int)_qzz_res;
5119 #endif /* NVALGRIND */
5120 }
5121 
5122 
5123 /* These requests allow control to move from the simulated CPU to the
5124  real CPU, calling an arbitrary function.
5125 
5126  Note that the current ThreadId is inserted as the first argument.
5127  So this call:
5128 
5129  VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
5130 
5131  requires f to have this signature:
5132 
5133  Word f(Word tid, Word arg1, Word arg2)
5134 
5135  where "Word" is a word-sized type.
5136 
5137  Note that these client requests are not entirely reliable. For example,
5138  if you call a function with them that subsequently calls printf(),
5139  there's a high chance Valgrind will crash. Generally, your prospects of
5140  these working are made higher if the called function does not refer to
5141  any global variables, and does not refer to any libc or other functions
5142  (printf et al). Any kind of entanglement with libc or dynamic linking is
5143  likely to have a bad outcome, for tricky reasons which we've grappled
5144  with a lot in the past.
5145 */
5146 #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
5147  VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5148  VG_USERREQ__CLIENT_CALL0, \
5149  _qyy_fn, \
5150  0, 0, 0, 0)
5151 
5152 #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
5153  VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5154  VG_USERREQ__CLIENT_CALL1, \
5155  _qyy_fn, \
5156  _qyy_arg1, 0, 0, 0)
5157 
5158 #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
5159  VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5160  VG_USERREQ__CLIENT_CALL2, \
5161  _qyy_fn, \
5162  _qyy_arg1, _qyy_arg2, 0, 0)
5163 
5164 #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
5165  VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5166  VG_USERREQ__CLIENT_CALL3, \
5167  _qyy_fn, \
5168  _qyy_arg1, _qyy_arg2, \
5169  _qyy_arg3, 0)
5170 
5171 
5172 /* Counts the number of errors that have been recorded by a tool. Nb:
5173  the tool must record the errors with VG_(maybe_record_error)() or
5174  VG_(unique_error)() for them to be counted. */
5175 #define VALGRIND_COUNT_ERRORS \
5176  (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
5177  0 /* default return */, \
5178  VG_USERREQ__COUNT_ERRORS, \
5179  0, 0, 0, 0, 0)
5180 
5181 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
5182  when heap blocks are allocated in order to give accurate results. This
5183  happens automatically for the standard allocator functions such as
5184  malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
5185  delete[], etc.
5186 
5187  But if your program uses a custom allocator, this doesn't automatically
5188  happen, and Valgrind will not do as well. For example, if you allocate
5189  superblocks with mmap() and then allocate chunks of the superblocks, all
5190  Valgrind's observations will be at the mmap() level and it won't know that
5191  the chunks should be considered separate entities. In Memcheck's case,
5192  that means you probably won't get heap block overrun detection (because
5193  there won't be redzones marked as unaddressable) and you definitely won't
5194  get any leak detection.
5195 
5196  The following client requests allow a custom allocator to be annotated so
5197  that it can be handled accurately by Valgrind.
5198 
5199  VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
5200  by a malloc()-like function. For Memcheck (an illustrative case), this
5201  does two things:
5202 
5203  - It records that the block has been allocated. This means any addresses
5204  within the block mentioned in error messages will be
5205  identified as belonging to the block. It also means that if the block
5206  isn't freed it will be detected by the leak checker.
5207 
5208  - It marks the block as being addressable and undefined (if 'is_zeroed' is
5209  not set), or addressable and defined (if 'is_zeroed' is set). This
5210  controls how accesses to the block by the program are handled.
5211 
5212  'addr' is the start of the usable block (ie. after any
5213  redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
5214  can apply redzones -- these are blocks of padding at the start and end of
5215  each block. Adding redzones is recommended as it makes it much more likely
5216  Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
5217  zeroed (or filled with another predictable value), as is the case for
5218  calloc().
5219 
5220  VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
5221  heap block -- that will be used by the client program -- is allocated.
5222  It's best to put it at the outermost level of the allocator if possible;
5223  for example, if you have a function my_alloc() which calls
5224  internal_alloc(), and the client request is put inside internal_alloc(),
5225  stack traces relating to the heap block will contain entries for both
5226  my_alloc() and internal_alloc(), which is probably not what you want.
5227 
5228  For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
5229  custom blocks from within a heap block, B, that has been allocated with
5230  malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
5231  -- the custom blocks will take precedence.
5232 
5233  VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
5234  Memcheck, it does two things:
5235 
5236  - It records that the block has been deallocated. This assumes that the
5237  block was annotated as having been allocated via
5238  VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
5239 
5240  - It marks the block as being unaddressable.
5241 
5242  VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
5243  heap block is deallocated.
5244 
5245  VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
5246  Memcheck, it does four things:
5247 
5248  - It records that the size of a block has been changed. This assumes that
5249  the block was annotated as having been allocated via
5250  VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
5251 
5252  - If the block shrunk, it marks the freed memory as being unaddressable.
5253 
5254  - If the block grew, it marks the new area as undefined and defines a red
5255  zone past the end of the new block.
5256 
5257  - The V-bits of the overlap between the old and the new block are preserved.
5258 
5259  VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
5260  and before deallocation of the old block.
5261 
5262  In many cases, these three client requests will not be enough to get your
5263  allocator working well with Memcheck. More specifically, if your allocator
5264  writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
5265  will be necessary to mark the memory as addressable just before the zeroing
5266  occurs, otherwise you'll get a lot of invalid write errors. For example,
5267  you'll need to do this if your allocator recycles freed blocks, but it
5268  zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
5269  Alternatively, if your allocator reuses freed blocks for allocator-internal
5270  data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
5271 
5272  Really, what's happening is a blurring of the lines between the client
5273  program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
5274  memory should be considered unaddressable to the client program, but the
5275  allocator knows more than the rest of the client program and so may be able
5276  to safely access it. Extra client requests are necessary for Valgrind to
5277  understand the distinction between the allocator and the rest of the
5278  program.
5279 
5280  Ignored if addr == 0.
5281 */
5282 #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
5283  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
5284  addr, sizeB, rzB, is_zeroed, 0)
5285 
5286 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
5287  Ignored if addr == 0.
5288 */
5289 #define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
5290  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
5291  addr, oldSizeB, newSizeB, rzB, 0)
5292 
5293 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
5294  Ignored if addr == 0.
5295 */
5296 #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
5297  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, \
5298  addr, rzB, 0, 0, 0)
5299 
5300 /* Create a memory pool. */
5301 #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
5302  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
5303  pool, rzB, is_zeroed, 0, 0)
5304 
5305 /* Destroy a memory pool. */
5306 #define VALGRIND_DESTROY_MEMPOOL(pool) \
5307  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL, \
5308  pool, 0, 0, 0, 0)
5309 
5310 /* Associate a piece of memory with a memory pool. */
5311 #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
5312  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, \
5313  pool, addr, size, 0, 0)
5314 
5315 /* Disassociate a piece of memory from a memory pool. */
5316 #define VALGRIND_MEMPOOL_FREE(pool, addr) \
5317  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, \
5318  pool, addr, 0, 0, 0)
5319 
5320 /* Disassociate any pieces outside a particular range. */
5321 #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
5322  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, \
5323  pool, addr, size, 0, 0)
5324 
5325 /* Tell the tool that the pool previously anchored at poolA has moved to poolB. */
5326 #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
5327  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL, \
5328  poolA, poolB, 0, 0, 0)
5329 
5330 /* Resize and/or move a piece associated with a memory pool. */
5331 #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
5332  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, \
5333  pool, addrA, addrB, size, 0)
5334 
5335 /* Return 1 if a mempool exists, else 0. */
5336 #define VALGRIND_MEMPOOL_EXISTS(pool) \
5337  (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5338  VG_USERREQ__MEMPOOL_EXISTS, \
5339  pool, 0, 0, 0, 0)
5340 
5341 /* Mark a piece of memory as being a stack. Returns a stack id. */
5342 #define VALGRIND_STACK_REGISTER(start, end) \
5343  (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5344  VG_USERREQ__STACK_REGISTER, \
5345  start, end, 0, 0, 0)
5346 
5347 /* Unmark the piece of memory associated with a stack id as being a
5348  stack. */
5349 #define VALGRIND_STACK_DEREGISTER(id) \
5350  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
5351  id, 0, 0, 0, 0)
5352 
5353 /* Change the start and end address of the stack id. */
5354 #define VALGRIND_STACK_CHANGE(id, start, end) \
5355  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE, \
5356  id, start, end, 0, 0)
5357 
5358 /* Load PDB debug info for Wine PE image_map. */
5359 #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
5360  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
5361  fd, ptr, total_size, delta, 0)
5362 
5363 /* Map a code address to a source file name and line number. buf64
5364  must point to a 64-byte buffer in the caller's address space. The
5365  result will be dumped in there and is guaranteed to be zero
5366  terminated. If no info is found, the first byte is set to zero. */
5367 #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
5368  (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5369  VG_USERREQ__MAP_IP_TO_SRCLOC, \
5370  addr, buf64, 0, 0, 0)
5371 
5372 /* Disable error reporting for this thread. Behaves in a stack like
5373  way, so you can safely call this multiple times provided that
5374  VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
5375  to re-enable reporting. The first call of this macro disables
5376  reporting. Subsequent calls have no effect except to increase the
5377  number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
5378  reporting. Child threads do not inherit this setting from their
5379  parents -- they are always created with reporting enabled. */
5380 #define VALGRIND_DISABLE_ERROR_REPORTING \
5381  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
5382  1, 0, 0, 0, 0)
5383 
5384 /* Re-enable error reporting, as per comments on
5385  VALGRIND_DISABLE_ERROR_REPORTING. */
5386 #define VALGRIND_ENABLE_ERROR_REPORTING \
5387  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
5388  -1, 0, 0, 0, 0)
5389 
5390 /* Execute a monitor command from the client program.
5391  If a connection is opened with GDB, the output will be sent
5392  according to the output mode set for vgdb.
5393  If no connection is opened, output will go to the log output.
5394  Returns 1 if command not recognised, 0 otherwise. */
5395 #define VALGRIND_MONITOR_COMMAND(command) \
5396  VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
5397  command, 0, 0, 0, 0)
5398 
5399 
5400 #undef PLAT_x86_darwin
5401 #undef PLAT_amd64_darwin
5402 #undef PLAT_x86_win32
5403 #undef PLAT_amd64_win64
5404 #undef PLAT_x86_linux
5405 #undef PLAT_amd64_linux
5406 #undef PLAT_ppc32_linux
5407 #undef PLAT_ppc64_linux
5408 #undef PLAT_arm_linux
5409 #undef PLAT_s390x_linux
5410 #undef PLAT_mips32_linux
5411 #undef PLAT_mips64_linux
5412 
5413 #endif /* __VALGRIND_H */
#define __attribute__(x)
Definition: System.cpp:72
GAUDI_API std::string format(const char *,...)
MsgStream format utility "a la sprintf(...)".
Definition: MsgStream.cpp:119
#define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)
Vg_ClientRequest