Loading [MathJax]/extensions/tex2jax.js
The Gaudi Framework  v31r0 (aeb156f0)
All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Properties Friends Macros Modules Pages
local_valgrind.h
Go to the documentation of this file.
1 /* -*- c -*-
2  ----------------------------------------------------------------
3 
4  Notice that the following BSD-style license applies to this one
5  file (valgrind.h) only. The rest of Valgrind is licensed under the
6  terms of the GNU General Public License, version 2, unless
7  otherwise indicated. See the COPYING file in the source
8  distribution for details.
9 
10  ----------------------------------------------------------------
11 
12  This file is part of Valgrind, a dynamic binary instrumentation
13  framework.
14 
15  Copyright (C) 2000-2013 Julian Seward. All rights reserved.
16 
17  Redistribution and use in source and binary forms, with or without
18  modification, are permitted provided that the following conditions
19  are met:
20 
21  1. Redistributions of source code must retain the above copyright
22  notice, this list of conditions and the following disclaimer.
23 
24  2. The origin of this software must not be misrepresented; you must
25  not claim that you wrote the original software. If you use this
26  software in a product, an acknowledgment in the product
27  documentation would be appreciated but is not required.
28 
29  3. Altered source versions must be plainly marked as such, and must
30  not be misrepresented as being the original software.
31 
32  4. The name of the author may not be used to endorse or promote
33  products derived from this software without specific prior written
34  permission.
35 
36  THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37  OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39  ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40  DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41  DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42  GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43  INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44  WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45  NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
47 
48  ----------------------------------------------------------------
49 
50  Notice that the above BSD-style license applies to this one file
51  (valgrind.h) only. The entire rest of Valgrind is licensed under
52  the terms of the GNU General Public License, version 2. See the
53  COPYING file in the source distribution for details.
54 
55  ----------------------------------------------------------------
56 */
57 
58 /* This file is for inclusion into client (your!) code.
59 
60  You can use these macros to manipulate and query Valgrind's
61  execution inside your own programs.
62 
63  The resulting executables will still run without Valgrind, just a
64  little bit more slowly than they otherwise would, but otherwise
65  unchanged. When not running on valgrind, each client request
66  consumes very few (eg. 7) instructions, so the resulting performance
67  loss is negligible unless you plan to execute client requests
68  millions of times per second. Nevertheless, if that is still a
69  problem, you can compile with the NVALGRIND symbol defined (gcc
70  -DNVALGRIND) so that client requests are not even compiled in. */
71 
#ifndef __VALGRIND_H
#  define __VALGRIND_H

/* ------------------------------------------------------------------ */
/* VERSION NUMBER OF VALGRIND                                         */
/* ------------------------------------------------------------------ */

/* Specify Valgrind's version number, so that user code can
   conditionally compile based on our version number.  Note that these
   were introduced at version 3.6 and so do not exist in version 3.5
   or earlier.  The recommended way to use them to check for "version
   X.Y or later" is (eg)

#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
    && (__VALGRIND_MAJOR__ > 3 \
        || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
/* This copy of valgrind.h corresponds to Valgrind 3.8. */
#  define __VALGRIND_MAJOR__ 3
#  define __VALGRIND_MINOR__ 8

#  include <stdarg.h>

/* Nb: this file might be included in a file compiled with -ansi.  So
   we can't use C++ style "//" comments nor the "asm" keyword (instead
   use "__asm__"). */

/* Derive some tags indicating what the target platform is.  Note
   that in this file we're using the compiler's CPP symbols for
   identifying architectures, which are different to the ones we use
   within the rest of Valgrind.  Note, __powerpc__ is active for both
   32 and 64-bit PPC, whereas __powerpc64__ is only active for the
   latter (on Linux, that is).

   Misc note: how to find out what's predefined in gcc by default:
   gcc -Wp,-dM somefile.c
*/
/* At most one PLAT_* tag ends up defined; on any platform that is not
   recognised below, NVALGRIND is forced on so that no inline asm is
   generated at all. */
#  undef PLAT_x86_darwin
#  undef PLAT_amd64_darwin
#  undef PLAT_x86_win32
#  undef PLAT_amd64_win64
#  undef PLAT_x86_linux
#  undef PLAT_amd64_linux
#  undef PLAT_ppc32_linux
#  undef PLAT_ppc64_linux
#  undef PLAT_arm_linux
#  undef PLAT_s390x_linux
#  undef PLAT_mips32_linux
#  undef PLAT_mips64_linux

#  if defined( __APPLE__ ) && defined( __i386__ )
#    define PLAT_x86_darwin 1
#  elif defined( __APPLE__ ) && defined( __x86_64__ )
#    define PLAT_amd64_darwin 1
#  elif defined( __MINGW32__ ) || defined( __CYGWIN32__ ) || ( defined( _WIN32 ) && defined( _M_IX86 ) )
#    define PLAT_x86_win32 1
#  elif defined( __MINGW64__ ) || ( defined( _WIN64 ) && defined( _M_X64 ) )
#    define PLAT_amd64_win64 1
#  elif defined( __linux__ ) && defined( __i386__ )
#    define PLAT_x86_linux 1
#  elif defined( __linux__ ) && defined( __x86_64__ )
#    define PLAT_amd64_linux 1
#  elif defined( __linux__ ) && defined( __powerpc__ ) && !defined( __powerpc64__ )
#    define PLAT_ppc32_linux 1
#  elif defined( __linux__ ) && defined( __powerpc__ ) && defined( __powerpc64__ )
#    define PLAT_ppc64_linux 1
#  elif defined( __linux__ ) && defined( __arm__ )
#    define PLAT_arm_linux 1
#  elif defined( __linux__ ) && defined( __s390__ ) && defined( __s390x__ )
#    define PLAT_s390x_linux 1
#  elif defined( __linux__ ) && defined( __mips__ )
#    if ( __mips == 64 )
#      define PLAT_mips64_linux 1
#    else
#      define PLAT_mips32_linux 1
#    endif
#  else
/* If we're not compiling for our target platform, don't generate
   any inline asms. */
#    if !defined( NVALGRIND )
#      define NVALGRIND 1
#    endif
#  endif
154 
155 /* ------------------------------------------------------------------ */
156 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
157 /* in here of use to end-users -- skip to the next section. */
158 /* ------------------------------------------------------------------ */
159 
160 /*
161  * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
162  * request. Accepts both pointers and integers as arguments.
163  *
164  * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
165  * client request that does not return a value.
166 
167  * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
168  * client request and whose value equals the client request result. Accepts
169  * both pointers and integers as arguments. Note that such calls are not
170  * necessarily pure functions -- they may have side effects.
171  */
172 
/* Statement form: assign the result of a client request to an lvalue.
   Thin wrapper over VALGRIND_DO_CLIENT_REQUEST_EXPR. */
#  define VALGRIND_DO_CLIENT_REQUEST( _zzq_rlval, _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, \
                                      _zzq_arg4, _zzq_arg5 )                                                   \
    do {                                                                                                       \
      ( _zzq_rlval ) = VALGRIND_DO_CLIENT_REQUEST_EXPR( ( _zzq_default ), ( _zzq_request ), ( _zzq_arg1 ),     \
                                                        ( _zzq_arg2 ), ( _zzq_arg3 ), ( _zzq_arg4 ), ( _zzq_arg5 ) ); \
    } while ( 0 )

/* Statement form: issue a client request and discard the result
   (default value 0 is used, then cast to void). */
#  define VALGRIND_DO_CLIENT_REQUEST_STMT( _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5 ) \
    do {                                                                                                         \
      (void)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, ( _zzq_request ), ( _zzq_arg1 ), ( _zzq_arg2 ), ( _zzq_arg3 ),   \
                                             ( _zzq_arg4 ), ( _zzq_arg5 ) );                                     \
    } while ( 0 )

#  if defined( NVALGRIND )

/* Define NVALGRIND to completely remove the Valgrind magic sequence
   from the compiled code (analogous to NDEBUG's effects on
   assert()).  The request expression then reduces to its default value. */
#    define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                             _zzq_arg5 )                                                             \
      ( _zzq_default )
195 # else /* ! NVALGRIND */
196 
197 /* The following defines the magic code sequences which the JITter
198  spots and handles magically. Don't look too closely at them as
199  they will rot your brain.
200 
201  The assembly code sequences for all architectures is in this one
202  file. This is because this file must be stand-alone, and we don't
203  want to have multiple files.
204 
205  For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
206  value gets put in the return slot, so that everything works when
207  this is executed not under Valgrind. Args are passed in a memory
208  block, and so there's no intrinsic limit to the number that could
209  be passed, but it's currently five.
210 
211  The macro args are:
212  _zzq_rlval result lvalue
213  _zzq_default default value (result returned when running on real CPU)
214  _zzq_request request code
215  _zzq_arg1..5 request params
216 
217  The other two macros are used to support function wrapping, and are
218  a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
219  guest's NRADDR pseudo-register and whatever other information is
220  needed to safely run the call original from the wrapper: on
221  ppc64-linux, the R2 value at the divert point is also needed. This
222  information is abstracted into a user-visible type, OrigFn.
223 
224  VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
225  guest, but guarantees that the branch instruction will not be
226  redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
227  branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
228  complete inline asm, since it needs to be combined with more magic
229  inline asm stuff to be useful.
230 */
231 
/* ------------------------- x86-{linux,darwin} ---------------- */

#  if defined( PLAT_x86_linux ) || defined( PLAT_x86_darwin ) || ( defined( PLAT_x86_win32 ) && defined( __GNUC__ ) )

/* Context captured by VALGRIND_GET_NR_CONTEXT: on x86 only the guest
   NRADDR pseudo-register is needed. */
typedef struct {
  unsigned int nraddr; /* where's the code? */
} OrigFn;

/* Magic marker: four rotates of %edi totalling 64 bits, i.e. a no-op
   on a real 32-bit CPU, but recognised by Valgrind's JIT as the start
   of a special instruction sequence. */
#    define __SPECIAL_INSTRUCTION_PREAMBLE          \
      "roll $3, %%edi ; roll $13, %%edi\n\t"        \
      "roll $29, %%edi ; roll $19, %%edi\n\t"

/* Args are passed via a 6-word memory block whose address goes in %eax;
   the default value is preloaded into %edx so that the expression
   yields _zzq_default when not running under Valgrind. */
#    define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                             _zzq_arg5 )                                      \
      __extension__( {                                                                        \
        volatile unsigned int _zzq_args[6];                                                   \
        volatile unsigned int _zzq_result;                                                    \
        _zzq_args[0] = (unsigned int)( _zzq_request );                                        \
        _zzq_args[1] = (unsigned int)( _zzq_arg1 );                                           \
        _zzq_args[2] = (unsigned int)( _zzq_arg2 );                                           \
        _zzq_args[3] = (unsigned int)( _zzq_arg3 );                                           \
        _zzq_args[4] = (unsigned int)( _zzq_arg4 );                                           \
        _zzq_args[5] = (unsigned int)( _zzq_arg5 );                                           \
        __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* %EDX = client_request ( %EAX ) */ \
                          "xchgl %%ebx,%%ebx"                                                 \
                          : "=d"( _zzq_result )                                               \
                          : "a"( &_zzq_args[0] ), "0"( _zzq_default )                         \
                          : "cc", "memory" );                                                 \
        _zzq_result;                                                                          \
      } )

#    define VALGRIND_GET_NR_CONTEXT( _zzq_rlval )                                  \
      {                                                                            \
        volatile OrigFn*      _zzq_orig = &( _zzq_rlval );                         \
        volatile unsigned int __addr;                                              \
        __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* %EAX = guest_NRADDR */ \
                          "xchgl %%ecx,%%ecx"                                      \
                          : "=a"( __addr )                                         \
                          :                                                        \
                          : "cc", "memory" );                                      \
        _zzq_orig->nraddr = __addr;                                                \
      }

/* Text fragment only (not a complete asm statement): marks a call
   through %EAX that must not be redirected. */
#    define VALGRIND_CALL_NOREDIR_EAX \
      __SPECIAL_INSTRUCTION_PREAMBLE  \
      /* call-noredir *%EAX */        \
      "xchgl %%edx,%%edx\n\t"

#    define VALGRIND_VEX_INJECT_IR()                                                                     \
      do {                                                                                               \
        __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "xchgl %%edi,%%edi\n\t" : : : "cc", "memory" ); \
      } while ( 0 )

#  endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
286 
/* ------------------------- x86-Win32 ------------------------- */

#  if defined( PLAT_x86_win32 ) && !defined( __GNUC__ )

typedef struct {
  unsigned int nraddr; /* where's the code? */
} OrigFn;

#    if defined( _MSC_VER )

/* MSVC inline-asm spelling of the rotate marker sequence (same four
   rotates as the GCC variant above). */
#      define __SPECIAL_INSTRUCTION_PREAMBLE __asm rol edi, 3 __asm rol edi, 13 __asm rol edi, 29 __asm rol edi, 19

#      define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, \
                                               _zzq_arg4, _zzq_arg5 )                                       \
        valgrind_do_client_request_expr( ( uintptr_t )( _zzq_default ), ( uintptr_t )( _zzq_request ),      \
                                         ( uintptr_t )( _zzq_arg1 ), ( uintptr_t )( _zzq_arg2 ),            \
                                         ( uintptr_t )( _zzq_arg3 ), ( uintptr_t )( _zzq_arg4 ),            \
                                         ( uintptr_t )( _zzq_arg5 ) )

/* Out-of-line helper: MSVC's __asm blocks cannot live inside a macro
   expression, so the request is issued from this static inline function.
   NOTE(review): uintptr_t is used without an explicit <stdint.h> include
   here — presumably supplied by the MSVC environment; confirm. */
static __inline uintptr_t valgrind_do_client_request_expr( uintptr_t _zzq_default, uintptr_t _zzq_request,
                                                           uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                                           uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                                           uintptr_t _zzq_arg5 ) {
  volatile uintptr_t    _zzq_args[6];
  volatile unsigned int _zzq_result;
  _zzq_args[0] = ( uintptr_t )( _zzq_request );
  _zzq_args[1] = ( uintptr_t )( _zzq_arg1 );
  _zzq_args[2] = ( uintptr_t )( _zzq_arg2 );
  _zzq_args[3] = ( uintptr_t )( _zzq_arg3 );
  _zzq_args[4] = ( uintptr_t )( _zzq_arg4 );
  _zzq_args[5] = ( uintptr_t )( _zzq_arg5 );
  __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
          __SPECIAL_INSTRUCTION_PREAMBLE
          /* %EDX = client_request ( %EAX ) */
          __asm xchg ebx,ebx
          __asm mov _zzq_result, edx
  }
  return _zzq_result;
}

#      define VALGRIND_GET_NR_CONTEXT( _zzq_rlval )                        \
        {                                                                  \
          volatile OrigFn*      _zzq_orig = &( _zzq_rlval );               \
          volatile unsigned int __addr;                                    \
          __asm { __SPECIAL_INSTRUCTION_PREAMBLE /* %EAX = guest_NRADDR */ \
                  __asm xchg ecx,ecx                                       \
                  __asm mov __addr, eax}                                   \
          _zzq_orig->nraddr = __addr;                                      \
        }

/* Deliberately unimplemented for MSVC: expands to the bare token ERROR
   so any attempted use fails to compile. */
#      define VALGRIND_CALL_NOREDIR_EAX ERROR

#      define VALGRIND_VEX_INJECT_IR()           \
        do {                                     \
          __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
                  __asm xchg edi,edi}            \
        } while ( 0 )

#    else
#      error Unsupported compiler.
#    endif

#  endif /* PLAT_x86_win32 */
350 
/* ------------------------ amd64-{linux,darwin} --------------- */

#  if defined( PLAT_amd64_linux ) || defined( PLAT_amd64_darwin )

typedef struct {
  unsigned long long int nraddr; /* where's the code? */
} OrigFn;

/* Magic marker: four rotates of %rdi totalling 128 bits — a no-op on a
   real 64-bit CPU, but recognised by Valgrind's JIT. */
#    define __SPECIAL_INSTRUCTION_PREAMBLE       \
      "rolq $3, %%rdi ; rolq $13, %%rdi\n\t"     \
      "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

/* Same scheme as x86: args block address in %rax, default preloaded
   into the %rdx result register. */
#    define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                             _zzq_arg5 )                                      \
      __extension__( {                                                                        \
        volatile unsigned long long int _zzq_args[6];                                         \
        volatile unsigned long long int _zzq_result;                                          \
        _zzq_args[0] = (unsigned long long int)( _zzq_request );                              \
        _zzq_args[1] = (unsigned long long int)( _zzq_arg1 );                                 \
        _zzq_args[2] = (unsigned long long int)( _zzq_arg2 );                                 \
        _zzq_args[3] = (unsigned long long int)( _zzq_arg3 );                                 \
        _zzq_args[4] = (unsigned long long int)( _zzq_arg4 );                                 \
        _zzq_args[5] = (unsigned long long int)( _zzq_arg5 );                                 \
        __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* %RDX = client_request ( %RAX ) */ \
                          "xchgq %%rbx,%%rbx"                                                 \
                          : "=d"( _zzq_result )                                               \
                          : "a"( &_zzq_args[0] ), "0"( _zzq_default )                         \
                          : "cc", "memory" );                                                 \
        _zzq_result;                                                                          \
      } )

#    define VALGRIND_GET_NR_CONTEXT( _zzq_rlval )                                  \
      {                                                                            \
        volatile OrigFn*                _zzq_orig = &( _zzq_rlval );               \
        volatile unsigned long long int __addr;                                    \
        __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* %RAX = guest_NRADDR */ \
                          "xchgq %%rcx,%%rcx"                                      \
                          : "=a"( __addr )                                         \
                          :                                                        \
                          : "cc", "memory" );                                      \
        _zzq_orig->nraddr = __addr;                                                \
      }

/* Text fragment only: marks a call through %RAX that must not be
   redirected. */
#    define VALGRIND_CALL_NOREDIR_RAX \
      __SPECIAL_INSTRUCTION_PREAMBLE  \
      /* call-noredir *%RAX */        \
      "xchgq %%rdx,%%rdx\n\t"

#    define VALGRIND_VEX_INJECT_IR()                                                                     \
      do {                                                                                               \
        __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "xchgq %%rdi,%%rdi\n\t" : : : "cc", "memory" ); \
      } while ( 0 )

#  endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
405 
/* ------------------------ ppc32-linux ------------------------ */

#  if defined( PLAT_ppc32_linux )

typedef struct {
  unsigned int nraddr; /* where's the code? */
} OrigFn;

/* Magic marker: rlwinm sequence recognised by Valgrind's JIT. */
#    define __SPECIAL_INSTRUCTION_PREAMBLE         \
      "rlwinm 0,0,3,0,0 ; rlwinm 0,0,13,0,0\n\t"   \
      "rlwinm 0,0,29,0,0 ; rlwinm 0,0,19,0,0\n\t"

/* Default in r3, args-block pointer in r4; r3 carries the result. */
#    define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                             _zzq_arg5 )                                    \
                                                                                            \
      __extension__( {                                                                      \
        unsigned int  _zzq_args[6];                                                         \
        unsigned int  _zzq_result;                                                          \
        unsigned int* _zzq_ptr;                                                             \
        _zzq_args[0] = (unsigned int)( _zzq_request );                                      \
        _zzq_args[1] = (unsigned int)( _zzq_arg1 );                                         \
        _zzq_args[2] = (unsigned int)( _zzq_arg2 );                                         \
        _zzq_args[3] = (unsigned int)( _zzq_arg3 );                                         \
        _zzq_args[4] = (unsigned int)( _zzq_arg4 );                                         \
        _zzq_args[5] = (unsigned int)( _zzq_arg5 );                                         \
        _zzq_ptr     = _zzq_args;                                                           \
        __asm__ volatile( "mr 3,%1\n\t" /*default*/                                         \
                          "mr 4,%2\n\t" /*ptr*/                                             \
                          __SPECIAL_INSTRUCTION_PREAMBLE /* %R3 = client_request ( %R4 ) */ \
                          "or 1,1,1\n\t"                                                    \
                          "mr %0,3" /*result*/                                              \
                          : "=b"( _zzq_result )                                             \
                          : "b"( _zzq_default ), "b"( _zzq_ptr )                            \
                          : "cc", "memory", "r3", "r4" );                                   \
        _zzq_result;                                                                        \
      } )

#    define VALGRIND_GET_NR_CONTEXT( _zzq_rlval )                                 \
      {                                                                           \
        volatile OrigFn* _zzq_orig = &( _zzq_rlval );                             \
        unsigned int     __addr;                                                  \
        __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* %R3 = guest_NRADDR */ \
                          "or 2,2,2\n\t"                                          \
                          "mr %0,3"                                               \
                          : "=b"( __addr )                                        \
                          :                                                       \
                          : "cc", "memory", "r3" );                               \
        _zzq_orig->nraddr = __addr;                                               \
      }

/* Text fragment only: branch-and-link through r11 that must not be
   redirected. */
#    define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
      __SPECIAL_INSTRUCTION_PREAMBLE                \
      /* branch-and-link-to-noredir *%R11 */        \
      "or 3,3,3\n\t"

#    define VALGRIND_VEX_INJECT_IR() \
      do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "or 5,5,5\n\t" ); } while ( 0 )

#  endif /* PLAT_ppc32_linux */
465 
/* ------------------------ ppc64-linux ------------------------ */

#  if defined( PLAT_ppc64_linux )

/* On ppc64 the R2 (TOC pointer) value at the divert point is needed in
   addition to NRADDR to safely call the original function. */
typedef struct {
  unsigned long long int nraddr; /* where's the code? */
  unsigned long long int r2;     /* what tocptr do we need? */
} OrigFn;

#    define __SPECIAL_INSTRUCTION_PREAMBLE      \
      "rotldi 0,0,3 ; rotldi 0,0,13\n\t"        \
      "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#    define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                             _zzq_arg5 )                                    \
                                                                                            \
      __extension__( {                                                                      \
        unsigned long long int  _zzq_args[6];                                               \
        unsigned long long int  _zzq_result;                                                \
        unsigned long long int* _zzq_ptr;                                                   \
        _zzq_args[0] = (unsigned long long int)( _zzq_request );                            \
        _zzq_args[1] = (unsigned long long int)( _zzq_arg1 );                               \
        _zzq_args[2] = (unsigned long long int)( _zzq_arg2 );                               \
        _zzq_args[3] = (unsigned long long int)( _zzq_arg3 );                               \
        _zzq_args[4] = (unsigned long long int)( _zzq_arg4 );                               \
        _zzq_args[5] = (unsigned long long int)( _zzq_arg5 );                               \
        _zzq_ptr     = _zzq_args;                                                           \
        __asm__ volatile( "mr 3,%1\n\t" /*default*/                                         \
                          "mr 4,%2\n\t" /*ptr*/                                             \
                          __SPECIAL_INSTRUCTION_PREAMBLE /* %R3 = client_request ( %R4 ) */ \
                          "or 1,1,1\n\t"                                                    \
                          "mr %0,3" /*result*/                                              \
                          : "=b"( _zzq_result )                                             \
                          : "b"( _zzq_default ), "b"( _zzq_ptr )                            \
                          : "cc", "memory", "r3", "r4" );                                   \
        _zzq_result;                                                                        \
      } )

/* Two asm stanzas: first fetches guest NRADDR, second fetches the R2
   value, both via r3. */
#    define VALGRIND_GET_NR_CONTEXT( _zzq_rlval )                                      \
      {                                                                                \
        volatile OrigFn*       _zzq_orig = &( _zzq_rlval );                            \
        unsigned long long int __addr;                                                 \
        __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* %R3 = guest_NRADDR */      \
                          "or 2,2,2\n\t"                                               \
                          "mr %0,3"                                                    \
                          : "=b"( __addr )                                             \
                          :                                                            \
                          : "cc", "memory", "r3" );                                    \
        _zzq_orig->nraddr = __addr;                                                    \
        __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* %R3 = guest_NRADDR_GPR2 */ \
                          "or 4,4,4\n\t"                                               \
                          "mr %0,3"                                                    \
                          : "=b"( __addr )                                             \
                          :                                                            \
                          : "cc", "memory", "r3" );                                    \
        _zzq_orig->r2 = __addr;                                                        \
      }

#    define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
      __SPECIAL_INSTRUCTION_PREAMBLE                \
      /* branch-and-link-to-noredir *%R11 */        \
      "or 3,3,3\n\t"

#    define VALGRIND_VEX_INJECT_IR() \
      do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "or 5,5,5\n\t" ); } while ( 0 )

#  endif /* PLAT_ppc64_linux */
533 
/* ------------------------- arm-linux ------------------------- */

#  if defined( PLAT_arm_linux )

typedef struct {
  unsigned int nraddr; /* where's the code? */
} OrigFn;

/* Magic marker: rotates of r12 recognised by Valgrind's JIT. */
#    define __SPECIAL_INSTRUCTION_PREAMBLE                \
      "mov r12, r12, ror #3  ; mov r12, r12, ror #13 \n\t" \
      "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

/* Default in r3, args-block pointer in r4; r3 carries the result. */
#    define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                             _zzq_arg5 )                                  \
                                                                                          \
      __extension__( {                                                                    \
        volatile unsigned int _zzq_args[6];                                               \
        volatile unsigned int _zzq_result;                                                \
        _zzq_args[0] = (unsigned int)( _zzq_request );                                    \
        _zzq_args[1] = (unsigned int)( _zzq_arg1 );                                       \
        _zzq_args[2] = (unsigned int)( _zzq_arg2 );                                       \
        _zzq_args[3] = (unsigned int)( _zzq_arg3 );                                       \
        _zzq_args[4] = (unsigned int)( _zzq_arg4 );                                       \
        _zzq_args[5] = (unsigned int)( _zzq_arg5 );                                       \
        __asm__ volatile( "mov r3, %1\n\t" /*default*/                                    \
                          "mov r4, %2\n\t" /*ptr*/                                        \
                          __SPECIAL_INSTRUCTION_PREAMBLE /* R3 = client_request ( R4 ) */ \
                          "orr r10, r10, r10\n\t"                                         \
                          "mov %0, r3" /*result*/                                         \
                          : "=r"( _zzq_result )                                           \
                          : "r"( _zzq_default ), "r"( &_zzq_args[0] )                     \
                          : "cc", "memory", "r3", "r4" );                                 \
        _zzq_result;                                                                      \
      } )

#    define VALGRIND_GET_NR_CONTEXT( _zzq_rlval )                                \
      {                                                                          \
        volatile OrigFn* _zzq_orig = &( _zzq_rlval );                            \
        unsigned int     __addr;                                                 \
        __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* R3 = guest_NRADDR */ \
                          "orr r11, r11, r11\n\t"                                \
                          "mov %0, r3"                                           \
                          : "=r"( __addr )                                       \
                          :                                                      \
                          : "cc", "memory", "r3" );                              \
        _zzq_orig->nraddr = __addr;                                              \
      }

/* Text fragment only: branch-and-link through r4 that must not be
   redirected. */
#    define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
      __SPECIAL_INSTRUCTION_PREAMBLE               \
      /* branch-and-link-to-noredir *%R4 */        \
      "orr r12, r12, r12\n\t"

#    define VALGRIND_VEX_INJECT_IR() \
      do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "orr r9, r9, r9\n\t" : : : "cc", "memory" ); } while ( 0 )

#  endif /* PLAT_arm_linux */
591 
/* ------------------------ s390x-linux ------------------------ */

#  if defined( PLAT_s390x_linux )

typedef struct {
  unsigned long long int nraddr; /* where's the code? */
} OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code. This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 */
#    define __SPECIAL_INSTRUCTION_PREAMBLE \
      "lr 15,15\n\t"                       \
      "lr 1,1\n\t"                         \
      "lr 2,2\n\t"                         \
      "lr 3,3\n\t"

/* One trailing lr per request type selects which operation follows the
   preamble. */
#    define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#    define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#    define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
#    define __VEX_INJECT_IR_CODE "lr 5,5\n\t"

#    define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                             _zzq_arg5 )                                                    \
      __extension__( {                                                                                      \
        volatile unsigned long long int _zzq_args[6];                                                       \
        volatile unsigned long long int _zzq_result;                                                        \
        _zzq_args[0] = (unsigned long long int)( _zzq_request );                                            \
        _zzq_args[1] = (unsigned long long int)( _zzq_arg1 );                                               \
        _zzq_args[2] = (unsigned long long int)( _zzq_arg2 );                                               \
        _zzq_args[3] = (unsigned long long int)( _zzq_arg3 );                                               \
        _zzq_args[4] = (unsigned long long int)( _zzq_arg4 );                                               \
        _zzq_args[5] = (unsigned long long int)( _zzq_arg5 );                                               \
        __asm__ volatile( /* r2 = args */                                                                   \
                          "lgr 2,%1\n\t" /* r3 = default */                                                 \
                          "lgr 3,%2\n\t" __SPECIAL_INSTRUCTION_PREAMBLE __CLIENT_REQUEST_CODE /* results = r3 */ \
                          "lgr %0, 3\n\t"                                                                   \
                          : "=d"( _zzq_result )                                                             \
                          : "a"( &_zzq_args[0] ), "0"( _zzq_default )                                       \
                          : "cc", "2", "3", "memory" );                                                     \
        _zzq_result;                                                                                        \
      } )

#    define VALGRIND_GET_NR_CONTEXT( _zzq_rlval )                                                 \
      {                                                                                           \
        volatile OrigFn*                _zzq_orig = &( _zzq_rlval );                              \
        volatile unsigned long long int __addr;                                                   \
        __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE __GET_NR_CONTEXT_CODE "lgr %0, 3\n\t"    \
                          : "=a"( __addr )                                                        \
                          :                                                                       \
                          : "cc", "3", "memory" );                                                \
        _zzq_orig->nraddr = __addr;                                                               \
      }

/* Text fragment only: non-redirected call through r1. */
#    define VALGRIND_CALL_NOREDIR_R1 \
      __SPECIAL_INSTRUCTION_PREAMBLE \
      __CALL_NO_REDIR_CODE

#    define VALGRIND_VEX_INJECT_IR() \
      do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE __VEX_INJECT_IR_CODE ); } while ( 0 )

#  endif /* PLAT_s390x_linux */
655 
/* ------------------------- mips32-linux ---------------- */

#  if defined( PLAT_mips32_linux )

typedef struct {
  unsigned int nraddr; /* where's the code? */
} OrigFn;

/* .word 0x342
 * .word 0x742
 * .word 0xC2
 * .word 0x4C2*/
/* Magic marker: shifts of $0 (hardwired zero, so a no-op) recognised by
   Valgrind's JIT. */
#    define __SPECIAL_INSTRUCTION_PREAMBLE \
      "srl $0, $0, 13\n\t"                 \
      "srl $0, $0, 29\n\t"                 \
      "srl $0, $0, 3\n\t"                  \
      "srl $0, $0, 19\n\t"

/* Default in $11, args-block pointer in $12; $11 carries the result. */
#    define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                             _zzq_arg5 )                                    \
      __extension__( {                                                                      \
        volatile unsigned int _zzq_args[6];                                                 \
        volatile unsigned int _zzq_result;                                                  \
        _zzq_args[0] = (unsigned int)( _zzq_request );                                      \
        _zzq_args[1] = (unsigned int)( _zzq_arg1 );                                         \
        _zzq_args[2] = (unsigned int)( _zzq_arg2 );                                         \
        _zzq_args[3] = (unsigned int)( _zzq_arg3 );                                         \
        _zzq_args[4] = (unsigned int)( _zzq_arg4 );                                         \
        _zzq_args[5] = (unsigned int)( _zzq_arg5 );                                         \
        __asm__ volatile( "move $11, %1\n\t" /*default*/                                    \
                          "move $12, %2\n\t" /*ptr*/                                        \
                          __SPECIAL_INSTRUCTION_PREAMBLE /* T3 = client_request ( T4 ) */   \
                          "or $13, $13, $13\n\t"                                            \
                          "move %0, $11\n\t" /*result*/                                     \
                          : "=r"( _zzq_result )                                             \
                          : "r"( _zzq_default ), "r"( &_zzq_args[0] )                       \
                          : "$11", "$12" );                                                 \
        _zzq_result;                                                                        \
      } )

#    define VALGRIND_GET_NR_CONTEXT( _zzq_rlval )                                  \
      {                                                                            \
        volatile OrigFn*      _zzq_orig = &( _zzq_rlval );                         \
        volatile unsigned int __addr;                                              \
        __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* %t9 = guest_NRADDR */  \
                          "or $14, $14, $14\n\t"                                   \
                          "move %0, $11" /*result*/                                \
                          : "=r"( __addr )                                         \
                          :                                                        \
                          : "$11" );                                               \
        _zzq_orig->nraddr = __addr;                                                \
      }

/* Text fragment only: non-redirected call through $t9. */
#    define VALGRIND_CALL_NOREDIR_T9 \
      __SPECIAL_INSTRUCTION_PREAMBLE \
      /* call-noredir *%t9 */        \
      "or $15, $15, $15\n\t"

#    define VALGRIND_VEX_INJECT_IR() \
      do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "or $11, $11, $11\n\t" ); } while ( 0 )

#  endif /* PLAT_mips32_linux */
718 
/* ------------------------- mips64-linux ---------------- */

#  if defined( PLAT_mips64_linux )

typedef struct {
  unsigned long long nraddr; /* where's the code? */
} OrigFn;

/* dsll $0,$0, 3
 * dsll $0,$0, 13
 * dsll $0,$0, 29
 * dsll $0,$0, 19*/
/* Magic marker: shifts of $0 (hardwired zero, so a no-op) recognised by
   Valgrind's JIT. */
#    define __SPECIAL_INSTRUCTION_PREAMBLE \
      "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"  \
      "dsll $0,$0,29 ; dsll $0,$0,19\n\t"

/* Default in $11, args-block pointer in $12; $11 carries the result. */
#    define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                             _zzq_arg5 )                                    \
      __extension__( {                                                                      \
        volatile unsigned long long int _zzq_args[6];                                       \
        volatile unsigned long long int _zzq_result;                                        \
        _zzq_args[0] = (unsigned long long int)( _zzq_request );                            \
        _zzq_args[1] = (unsigned long long int)( _zzq_arg1 );                               \
        _zzq_args[2] = (unsigned long long int)( _zzq_arg2 );                               \
        _zzq_args[3] = (unsigned long long int)( _zzq_arg3 );                               \
        _zzq_args[4] = (unsigned long long int)( _zzq_arg4 );                               \
        _zzq_args[5] = (unsigned long long int)( _zzq_arg5 );                               \
        __asm__ volatile( "move $11, %1\n\t" /*default*/                                    \
                          "move $12, %2\n\t" /*ptr*/                                        \
                          __SPECIAL_INSTRUCTION_PREAMBLE /* $11 = client_request ( $12 ) */ \
                          "or $13, $13, $13\n\t"                                            \
                          "move %0, $11\n\t" /*result*/                                     \
                          : "=r"( _zzq_result )                                             \
                          : "r"( _zzq_default ), "r"( &_zzq_args[0] )                       \
                          : "$11", "$12" );                                                 \
        _zzq_result;                                                                        \
      } )

#    define VALGRIND_GET_NR_CONTEXT( _zzq_rlval )                                  \
      {                                                                            \
        volatile OrigFn*                _zzq_orig = &( _zzq_rlval );               \
        volatile unsigned long long int __addr;                                    \
        __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* $11 = guest_NRADDR */  \
                          "or $14, $14, $14\n\t"                                   \
                          "move %0, $11" /*result*/                                \
                          : "=r"( __addr )                                         \
                          :                                                        \
                          : "$11" );                                               \
        _zzq_orig->nraddr = __addr;                                                \
      }

/* Text fragment only: non-redirected call through $25 ($t9). */
#    define VALGRIND_CALL_NOREDIR_T9 \
      __SPECIAL_INSTRUCTION_PREAMBLE \
      /* call-noredir $25 */         \
      "or $15, $15, $15\n\t"

#    define VALGRIND_VEX_INJECT_IR() \
      do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "or $11, $11, $11\n\t" ); } while ( 0 )

#  endif /* PLAT_mips64_linux */
779 
780 /* Insert assembly code for other platforms here... */
781 
782 # endif /* NVALGRIND */
783 
784 /* ------------------------------------------------------------------ */
785 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
786 /* ugly. It's the least-worst tradeoff I can think of. */
787 /* ------------------------------------------------------------------ */
788 
789 /* This section defines magic (a.k.a appalling-hack) macros for doing
790  guaranteed-no-redirection macros, so as to get from function
791  wrappers to the functions they are wrapping. The whole point is to
792  construct standard call sequences, but to do the call itself with a
793  special no-redirect call pseudo-instruction that the JIT
794  understands and handles specially. This section is long and
795  repetitious, and I can't see a way to make it shorter.
796 
797  The naming scheme is as follows:
798 
799  CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
800 
801  'W' stands for "word" and 'v' for "void". Hence there are
802  different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
803  and for each, the possibility of returning a word-typed result, or
804  no result.
805 */
806 
807 /* Use these to write the name of your wrapper. NOTE: duplicates
808  VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
809  the default behaviour equivalance class tag "0000" into the name.
810  See pub_tool_redir.h for details -- normally you don't need to
811  think about this, though. */
812 
813 /* Use an extra level of macroisation so as to ensure the soname/fnname
814  args are fully macro-expanded before pasting them together. */
815 # define VG_CONCAT4( _aa, _bb, _cc, _dd ) _aa##_bb##_cc##_dd
816 
817 # define I_WRAP_SONAME_FNNAME_ZU( soname, fnname ) VG_CONCAT4( _vgw00000ZU_, soname, _, fnname )
818 
819 # define I_WRAP_SONAME_FNNAME_ZZ( soname, fnname ) VG_CONCAT4( _vgw00000ZZ_, soname, _, fnname )
820 
/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros.  The type of the argument _lval is OrigFn.
   (Thin alias for VALGRIND_GET_NR_CONTEXT; note that inside a
   *replacement* function -- see below -- it always yields zero.) */
# define VALGRIND_GET_ORIG_FN( _lval ) VALGRIND_GET_NR_CONTEXT( _lval )
826 
/* Also provide end-user facilities for function replacement, rather
   than wrapping.  A replacement function differs from a wrapper in
   that it has no way to get hold of the original function being
   called, and hence no way to call onwards to it.  In a replacement
   function, VALGRIND_GET_ORIG_FN always returns zero.
   Same naming scheme as I_WRAP_SONAME_FNNAME_* above, but with the
   "_vgr" (replace) prefix instead of "_vgw" (wrap). */

# define I_REPLACE_SONAME_FNNAME_ZU( soname, fnname ) VG_CONCAT4( _vgr00000ZU_, soname, _, fnname )

# define I_REPLACE_SONAME_FNNAME_ZZ( soname, fnname ) VG_CONCAT4( _vgr00000ZZ_, soname, _, fnname )
836 
/* Derivatives of the main macros below, for calling functions
   returning void.  Each simply invokes the corresponding
   word-returning CALL_FN_W_* macro (defined per-platform below) and
   discards the result into a volatile dummy local. */

# define CALL_FN_v_v( fnptr ) \
  do { \
    volatile unsigned long _junk; \
    CALL_FN_W_v( _junk, fnptr ); \
  } while ( 0 )

# define CALL_FN_v_W( fnptr, arg1 ) \
  do { \
    volatile unsigned long _junk; \
    CALL_FN_W_W( _junk, fnptr, arg1 ); \
  } while ( 0 )

# define CALL_FN_v_WW( fnptr, arg1, arg2 ) \
  do { \
    volatile unsigned long _junk; \
    CALL_FN_W_WW( _junk, fnptr, arg1, arg2 ); \
  } while ( 0 )

# define CALL_FN_v_WWW( fnptr, arg1, arg2, arg3 ) \
  do { \
    volatile unsigned long _junk; \
    CALL_FN_W_WWW( _junk, fnptr, arg1, arg2, arg3 ); \
  } while ( 0 )

# define CALL_FN_v_WWWW( fnptr, arg1, arg2, arg3, arg4 ) \
  do { \
    volatile unsigned long _junk; \
    CALL_FN_W_WWWW( _junk, fnptr, arg1, arg2, arg3, arg4 ); \
  } while ( 0 )

# define CALL_FN_v_5W( fnptr, arg1, arg2, arg3, arg4, arg5 ) \
  do { \
    volatile unsigned long _junk; \
    CALL_FN_W_5W( _junk, fnptr, arg1, arg2, arg3, arg4, arg5 ); \
  } while ( 0 )

# define CALL_FN_v_6W( fnptr, arg1, arg2, arg3, arg4, arg5, arg6 ) \
  do { \
    volatile unsigned long _junk; \
    CALL_FN_W_6W( _junk, fnptr, arg1, arg2, arg3, arg4, arg5, arg6 ); \
  } while ( 0 )

# define CALL_FN_v_7W( fnptr, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
  do { \
    volatile unsigned long _junk; \
    CALL_FN_W_7W( _junk, fnptr, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ); \
  } while ( 0 )
887 
888 /* ------------------------- x86-{linux,darwin} ---------------- */
889 
890 # if defined( PLAT_x86_linux ) || defined( PLAT_x86_darwin )
891 
/* These regs are trashed by the hidden call.  No need to mention eax
   as gcc can already see that, plus causes gcc to bomb.  (eax is the
   asm output operand "=a" in every CALL_FN_ macro below.) */
# define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
895 
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save %esp in %edi (every user below lists "edi" as trashed), then
   round %esp down to a 16-byte boundary. */
# define VALGRIND_ALIGN_STACK \
  "movl %%esp,%%edi\n\t" \
  "andl $0xfffffff0,%%esp\n\t"
# define VALGRIND_RESTORE_STACK "movl %%edi,%%esp\n\t"
905 
/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
   long) == 4. */

/* Common pattern for every x86 CALL_FN_W_* macro below:
   - _argvec[0] holds the target address (_orig.nraddr) and
     _argvec[1..n] the n arguments, all as unsigned longs;
   - %eax is pointed at _argvec[0]; the old stack pointer is saved in
     %edi and %esp is 16-byte aligned (VALGRIND_ALIGN_STACK);
   - an optional "subl $k, %%esp" pad plus the pushl's keeps the total
     outgoing frame a multiple of 16 bytes;
   - arguments are pushed right-to-left, the target is loaded into
     %eax and invoked via the no-redirect pseudo-call
     VALGRIND_CALL_NOREDIR_EAX, the word result is read back from
     %eax, and %esp is restored from %edi. */

/* 0 args: nothing to push, the aligned stack is already fine. */
# define CALL_FN_W_v( lval, orig ) \
  do { \
    volatile OrigFn _orig = ( orig ); \
    volatile unsigned long _argvec[1]; \
    volatile unsigned long _res; \
    _argvec[0] = (unsigned long)_orig.nraddr; \
    __asm__ volatile( VALGRIND_ALIGN_STACK "movl (%%eax), %%eax\n\t" /* target->%eax */ \
                      VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                      : /*out*/ "=a"( _res ) \
                      : /*in*/ "a"( &_argvec[0] ) \
                      : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
    lval = (__typeof__( lval ))_res; \
  } while ( 0 )

/* 1 arg: 12-byte pad + 1 push = 16 bytes. */
# define CALL_FN_W_W( lval, orig, arg1 ) \
  do { \
    volatile OrigFn _orig = ( orig ); \
    volatile unsigned long _argvec[2]; \
    volatile unsigned long _res; \
    _argvec[0] = (unsigned long)_orig.nraddr; \
    _argvec[1] = (unsigned long)( arg1 ); \
    __asm__ volatile( VALGRIND_ALIGN_STACK "subl $12, %%esp\n\t" \
                      "pushl 4(%%eax)\n\t" \
                      "movl (%%eax), %%eax\n\t" /* target->%eax */ \
                      VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                      : /*out*/ "=a"( _res ) \
                      : /*in*/ "a"( &_argvec[0] ) \
                      : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
    lval = (__typeof__( lval ))_res; \
  } while ( 0 )

/* 2 args: 8-byte pad + 2 pushes = 16 bytes. */
# define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
  do { \
    volatile OrigFn _orig = ( orig ); \
    volatile unsigned long _argvec[3]; \
    volatile unsigned long _res; \
    _argvec[0] = (unsigned long)_orig.nraddr; \
    _argvec[1] = (unsigned long)( arg1 ); \
    _argvec[2] = (unsigned long)( arg2 ); \
    __asm__ volatile( VALGRIND_ALIGN_STACK "subl $8, %%esp\n\t" \
                      "pushl 8(%%eax)\n\t" \
                      "pushl 4(%%eax)\n\t" \
                      "movl (%%eax), %%eax\n\t" /* target->%eax */ \
                      VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                      : /*out*/ "=a"( _res ) \
                      : /*in*/ "a"( &_argvec[0] ) \
                      : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
    lval = (__typeof__( lval ))_res; \
  } while ( 0 )

/* 3 args: 4-byte pad + 3 pushes = 16 bytes. */
# define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
  do { \
    volatile OrigFn _orig = ( orig ); \
    volatile unsigned long _argvec[4]; \
    volatile unsigned long _res; \
    _argvec[0] = (unsigned long)_orig.nraddr; \
    _argvec[1] = (unsigned long)( arg1 ); \
    _argvec[2] = (unsigned long)( arg2 ); \
    _argvec[3] = (unsigned long)( arg3 ); \
    __asm__ volatile( VALGRIND_ALIGN_STACK "subl $4, %%esp\n\t" \
                      "pushl 12(%%eax)\n\t" \
                      "pushl 8(%%eax)\n\t" \
                      "pushl 4(%%eax)\n\t" \
                      "movl (%%eax), %%eax\n\t" /* target->%eax */ \
                      VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                      : /*out*/ "=a"( _res ) \
                      : /*in*/ "a"( &_argvec[0] ) \
                      : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
    lval = (__typeof__( lval ))_res; \
  } while ( 0 )

/* 4 args: 4 pushes = 16 bytes exactly, no pad needed. */
# define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
  do { \
    volatile OrigFn _orig = ( orig ); \
    volatile unsigned long _argvec[5]; \
    volatile unsigned long _res; \
    _argvec[0] = (unsigned long)_orig.nraddr; \
    _argvec[1] = (unsigned long)( arg1 ); \
    _argvec[2] = (unsigned long)( arg2 ); \
    _argvec[3] = (unsigned long)( arg3 ); \
    _argvec[4] = (unsigned long)( arg4 ); \
    __asm__ volatile( VALGRIND_ALIGN_STACK "pushl 16(%%eax)\n\t" \
                      "pushl 12(%%eax)\n\t" \
                      "pushl 8(%%eax)\n\t" \
                      "pushl 4(%%eax)\n\t" \
                      "movl (%%eax), %%eax\n\t" /* target->%eax */ \
                      VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                      : /*out*/ "=a"( _res ) \
                      : /*in*/ "a"( &_argvec[0] ) \
                      : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
    lval = (__typeof__( lval ))_res; \
  } while ( 0 )

/* 5 args: 12-byte pad + 5 pushes = 32 bytes. */
# define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
  do { \
    volatile OrigFn _orig = ( orig ); \
    volatile unsigned long _argvec[6]; \
    volatile unsigned long _res; \
    _argvec[0] = (unsigned long)_orig.nraddr; \
    _argvec[1] = (unsigned long)( arg1 ); \
    _argvec[2] = (unsigned long)( arg2 ); \
    _argvec[3] = (unsigned long)( arg3 ); \
    _argvec[4] = (unsigned long)( arg4 ); \
    _argvec[5] = (unsigned long)( arg5 ); \
    __asm__ volatile( VALGRIND_ALIGN_STACK "subl $12, %%esp\n\t" \
                      "pushl 20(%%eax)\n\t" \
                      "pushl 16(%%eax)\n\t" \
                      "pushl 12(%%eax)\n\t" \
                      "pushl 8(%%eax)\n\t" \
                      "pushl 4(%%eax)\n\t" \
                      "movl (%%eax), %%eax\n\t" /* target->%eax */ \
                      VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                      : /*out*/ "=a"( _res ) \
                      : /*in*/ "a"( &_argvec[0] ) \
                      : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
    lval = (__typeof__( lval ))_res; \
  } while ( 0 )

/* 6 args: 8-byte pad + 6 pushes = 32 bytes. */
# define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
  do { \
    volatile OrigFn _orig = ( orig ); \
    volatile unsigned long _argvec[7]; \
    volatile unsigned long _res; \
    _argvec[0] = (unsigned long)_orig.nraddr; \
    _argvec[1] = (unsigned long)( arg1 ); \
    _argvec[2] = (unsigned long)( arg2 ); \
    _argvec[3] = (unsigned long)( arg3 ); \
    _argvec[4] = (unsigned long)( arg4 ); \
    _argvec[5] = (unsigned long)( arg5 ); \
    _argvec[6] = (unsigned long)( arg6 ); \
    __asm__ volatile( VALGRIND_ALIGN_STACK "subl $8, %%esp\n\t" \
                      "pushl 24(%%eax)\n\t" \
                      "pushl 20(%%eax)\n\t" \
                      "pushl 16(%%eax)\n\t" \
                      "pushl 12(%%eax)\n\t" \
                      "pushl 8(%%eax)\n\t" \
                      "pushl 4(%%eax)\n\t" \
                      "movl (%%eax), %%eax\n\t" /* target->%eax */ \
                      VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                      : /*out*/ "=a"( _res ) \
                      : /*in*/ "a"( &_argvec[0] ) \
                      : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
    lval = (__typeof__( lval ))_res; \
  } while ( 0 )

/* 7 args: 4-byte pad + 7 pushes = 32 bytes. */
# define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
  do { \
    volatile OrigFn _orig = ( orig ); \
    volatile unsigned long _argvec[8]; \
    volatile unsigned long _res; \
    _argvec[0] = (unsigned long)_orig.nraddr; \
    _argvec[1] = (unsigned long)( arg1 ); \
    _argvec[2] = (unsigned long)( arg2 ); \
    _argvec[3] = (unsigned long)( arg3 ); \
    _argvec[4] = (unsigned long)( arg4 ); \
    _argvec[5] = (unsigned long)( arg5 ); \
    _argvec[6] = (unsigned long)( arg6 ); \
    _argvec[7] = (unsigned long)( arg7 ); \
    __asm__ volatile( VALGRIND_ALIGN_STACK "subl $4, %%esp\n\t" \
                      "pushl 28(%%eax)\n\t" \
                      "pushl 24(%%eax)\n\t" \
                      "pushl 20(%%eax)\n\t" \
                      "pushl 16(%%eax)\n\t" \
                      "pushl 12(%%eax)\n\t" \
                      "pushl 8(%%eax)\n\t" \
                      "pushl 4(%%eax)\n\t" \
                      "movl (%%eax), %%eax\n\t" /* target->%eax */ \
                      VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                      : /*out*/ "=a"( _res ) \
                      : /*in*/ "a"( &_argvec[0] ) \
                      : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
    lval = (__typeof__( lval ))_res; \
  } while ( 0 )

/* 8 args: 8 pushes = 32 bytes exactly, no pad needed. */
# define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
  do { \
    volatile OrigFn _orig = ( orig ); \
    volatile unsigned long _argvec[9]; \
    volatile unsigned long _res; \
    _argvec[0] = (unsigned long)_orig.nraddr; \
    _argvec[1] = (unsigned long)( arg1 ); \
    _argvec[2] = (unsigned long)( arg2 ); \
    _argvec[3] = (unsigned long)( arg3 ); \
    _argvec[4] = (unsigned long)( arg4 ); \
    _argvec[5] = (unsigned long)( arg5 ); \
    _argvec[6] = (unsigned long)( arg6 ); \
    _argvec[7] = (unsigned long)( arg7 ); \
    _argvec[8] = (unsigned long)( arg8 ); \
    __asm__ volatile( VALGRIND_ALIGN_STACK "pushl 32(%%eax)\n\t" \
                      "pushl 28(%%eax)\n\t" \
                      "pushl 24(%%eax)\n\t" \
                      "pushl 20(%%eax)\n\t" \
                      "pushl 16(%%eax)\n\t" \
                      "pushl 12(%%eax)\n\t" \
                      "pushl 8(%%eax)\n\t" \
                      "pushl 4(%%eax)\n\t" \
                      "movl (%%eax), %%eax\n\t" /* target->%eax */ \
                      VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                      : /*out*/ "=a"( _res ) \
                      : /*in*/ "a"( &_argvec[0] ) \
                      : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
    lval = (__typeof__( lval ))_res; \
  } while ( 0 )

/* 9 args: 12-byte pad + 9 pushes = 48 bytes. */
# define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
  do { \
    volatile OrigFn _orig = ( orig ); \
    volatile unsigned long _argvec[10]; \
    volatile unsigned long _res; \
    _argvec[0] = (unsigned long)_orig.nraddr; \
    _argvec[1] = (unsigned long)( arg1 ); \
    _argvec[2] = (unsigned long)( arg2 ); \
    _argvec[3] = (unsigned long)( arg3 ); \
    _argvec[4] = (unsigned long)( arg4 ); \
    _argvec[5] = (unsigned long)( arg5 ); \
    _argvec[6] = (unsigned long)( arg6 ); \
    _argvec[7] = (unsigned long)( arg7 ); \
    _argvec[8] = (unsigned long)( arg8 ); \
    _argvec[9] = (unsigned long)( arg9 ); \
    __asm__ volatile( VALGRIND_ALIGN_STACK "subl $12, %%esp\n\t" \
                      "pushl 36(%%eax)\n\t" \
                      "pushl 32(%%eax)\n\t" \
                      "pushl 28(%%eax)\n\t" \
                      "pushl 24(%%eax)\n\t" \
                      "pushl 20(%%eax)\n\t" \
                      "pushl 16(%%eax)\n\t" \
                      "pushl 12(%%eax)\n\t" \
                      "pushl 8(%%eax)\n\t" \
                      "pushl 4(%%eax)\n\t" \
                      "movl (%%eax), %%eax\n\t" /* target->%eax */ \
                      VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                      : /*out*/ "=a"( _res ) \
                      : /*in*/ "a"( &_argvec[0] ) \
                      : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
    lval = (__typeof__( lval ))_res; \
  } while ( 0 )

/* 10 args: 8-byte pad + 10 pushes = 48 bytes. */
# define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
  do { \
    volatile OrigFn _orig = ( orig ); \
    volatile unsigned long _argvec[11]; \
    volatile unsigned long _res; \
    _argvec[0] = (unsigned long)_orig.nraddr; \
    _argvec[1] = (unsigned long)( arg1 ); \
    _argvec[2] = (unsigned long)( arg2 ); \
    _argvec[3] = (unsigned long)( arg3 ); \
    _argvec[4] = (unsigned long)( arg4 ); \
    _argvec[5] = (unsigned long)( arg5 ); \
    _argvec[6] = (unsigned long)( arg6 ); \
    _argvec[7] = (unsigned long)( arg7 ); \
    _argvec[8] = (unsigned long)( arg8 ); \
    _argvec[9] = (unsigned long)( arg9 ); \
    _argvec[10] = (unsigned long)( arg10 ); \
    __asm__ volatile( VALGRIND_ALIGN_STACK "subl $8, %%esp\n\t" \
                      "pushl 40(%%eax)\n\t" \
                      "pushl 36(%%eax)\n\t" \
                      "pushl 32(%%eax)\n\t" \
                      "pushl 28(%%eax)\n\t" \
                      "pushl 24(%%eax)\n\t" \
                      "pushl 20(%%eax)\n\t" \
                      "pushl 16(%%eax)\n\t" \
                      "pushl 12(%%eax)\n\t" \
                      "pushl 8(%%eax)\n\t" \
                      "pushl 4(%%eax)\n\t" \
                      "movl (%%eax), %%eax\n\t" /* target->%eax */ \
                      VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                      : /*out*/ "=a"( _res ) \
                      : /*in*/ "a"( &_argvec[0] ) \
                      : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
    lval = (__typeof__( lval ))_res; \
  } while ( 0 )

/* 11 args: 4-byte pad + 11 pushes = 48 bytes. */
# define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
  do { \
    volatile OrigFn _orig = ( orig ); \
    volatile unsigned long _argvec[12]; \
    volatile unsigned long _res; \
    _argvec[0] = (unsigned long)_orig.nraddr; \
    _argvec[1] = (unsigned long)( arg1 ); \
    _argvec[2] = (unsigned long)( arg2 ); \
    _argvec[3] = (unsigned long)( arg3 ); \
    _argvec[4] = (unsigned long)( arg4 ); \
    _argvec[5] = (unsigned long)( arg5 ); \
    _argvec[6] = (unsigned long)( arg6 ); \
    _argvec[7] = (unsigned long)( arg7 ); \
    _argvec[8] = (unsigned long)( arg8 ); \
    _argvec[9] = (unsigned long)( arg9 ); \
    _argvec[10] = (unsigned long)( arg10 ); \
    _argvec[11] = (unsigned long)( arg11 ); \
    __asm__ volatile( VALGRIND_ALIGN_STACK "subl $4, %%esp\n\t" \
                      "pushl 44(%%eax)\n\t" \
                      "pushl 40(%%eax)\n\t" \
                      "pushl 36(%%eax)\n\t" \
                      "pushl 32(%%eax)\n\t" \
                      "pushl 28(%%eax)\n\t" \
                      "pushl 24(%%eax)\n\t" \
                      "pushl 20(%%eax)\n\t" \
                      "pushl 16(%%eax)\n\t" \
                      "pushl 12(%%eax)\n\t" \
                      "pushl 8(%%eax)\n\t" \
                      "pushl 4(%%eax)\n\t" \
                      "movl (%%eax), %%eax\n\t" /* target->%eax */ \
                      VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                      : /*out*/ "=a"( _res ) \
                      : /*in*/ "a"( &_argvec[0] ) \
                      : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
    lval = (__typeof__( lval ))_res; \
  } while ( 0 )

/* 12 args: 12 pushes = 48 bytes exactly, no pad needed. */
# define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
  do { \
    volatile OrigFn _orig = ( orig ); \
    volatile unsigned long _argvec[13]; \
    volatile unsigned long _res; \
    _argvec[0] = (unsigned long)_orig.nraddr; \
    _argvec[1] = (unsigned long)( arg1 ); \
    _argvec[2] = (unsigned long)( arg2 ); \
    _argvec[3] = (unsigned long)( arg3 ); \
    _argvec[4] = (unsigned long)( arg4 ); \
    _argvec[5] = (unsigned long)( arg5 ); \
    _argvec[6] = (unsigned long)( arg6 ); \
    _argvec[7] = (unsigned long)( arg7 ); \
    _argvec[8] = (unsigned long)( arg8 ); \
    _argvec[9] = (unsigned long)( arg9 ); \
    _argvec[10] = (unsigned long)( arg10 ); \
    _argvec[11] = (unsigned long)( arg11 ); \
    _argvec[12] = (unsigned long)( arg12 ); \
    __asm__ volatile( VALGRIND_ALIGN_STACK "pushl 48(%%eax)\n\t" \
                      "pushl 44(%%eax)\n\t" \
                      "pushl 40(%%eax)\n\t" \
                      "pushl 36(%%eax)\n\t" \
                      "pushl 32(%%eax)\n\t" \
                      "pushl 28(%%eax)\n\t" \
                      "pushl 24(%%eax)\n\t" \
                      "pushl 20(%%eax)\n\t" \
                      "pushl 16(%%eax)\n\t" \
                      "pushl 12(%%eax)\n\t" \
                      "pushl 8(%%eax)\n\t" \
                      "pushl 4(%%eax)\n\t" \
                      "movl (%%eax), %%eax\n\t" /* target->%eax */ \
                      VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                      : /*out*/ "=a"( _res ) \
                      : /*in*/ "a"( &_argvec[0] ) \
                      : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
    lval = (__typeof__( lval ))_res; \
  } while ( 0 )
1255 
1256 # endif /* PLAT_x86_linux || PLAT_x86_darwin */
1257 
1258 /* ------------------------ amd64-{linux,darwin} --------------- */
1259 
1260 # if defined( PLAT_amd64_linux ) || defined( PLAT_amd64_darwin )
1261 
/* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */

/* These regs are trashed by the hidden call.  (rax is omitted: it is
   already the asm output operand "=a" in every CALL_FN_ macro below.) */
# define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", "rdi", "r8", "r9", "r10", "r11"
1266 
1267 /* This is all pretty complex. It's so as to make stack unwinding
1268  work reliably. See bug 243270. The basic problem is the sub and
1269  add of 128 of %rsp in all of the following macros. If gcc believes
1270  the CFA is in %rsp, then unwinding may fail, because what's at the
1271  CFA is not what gcc "expected" when it constructs the CFIs for the
1272  places where the macros are instantiated.
1273 
1274  But we can't just add a CFI annotation to increase the CFA offset
1275  by 128, to match the sub of 128 from %rsp, because we don't know
1276  whether gcc has chosen %rsp as the CFA at that point, or whether it
1277  has chosen some other register (eg, %rbp). In the latter case,
1278  adding a CFI annotation to change the CFA offset is simply wrong.
1279 
1280  So the solution is to get hold of the CFA using
1281  __builtin_dwarf_cfa(), put it in a known register, and add a
1282  CFI annotation to say what the register is. We choose %rbp for
1283  this (perhaps perversely), because:
1284 
1285  (1) %rbp is already subject to unwinding. If a new register was
1286  chosen then the unwinder would have to unwind it in all stack
1287  traces, which is expensive, and
1288 
1289  (2) %rbp is already subject to precise exception updates in the
1290  JIT. If a new register was chosen, we'd have to have precise
1291  exceptions for it too, which reduces performance of the
1292  generated code.
1293 
1294  However .. one extra complication. We can't just whack the result
1295  of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1296  list of trashed registers at the end of the inline assembly
1297  fragments; gcc won't allow %rbp to appear in that list. Hence
1298  instead we need to stash %rbp in %r15 for the duration of the asm,
1299  and say that %r15 is trashed instead. gcc seems happy to go with
1300  that.
1301 
1302  Oh .. and this all needs to be conditionalised so that it is
1303  unchanged from before this commit, when compiled with older gccs
1304  that don't support __builtin_dwarf_cfa. Furthermore, since
1305  this header file is freestanding, it has to be independent of
1306  config.h, and so the following conditionalisation cannot depend on
1307  configure time checks.
1308 
1309  Although it's not clear from
1310  'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1311  this expression excludes Darwin.
1312  .cfi directives in Darwin assembly appear to be completely
1313  different and I haven't investigated how they work.
1314 
1315  For even more entertainment value, note we have to use the
1316  completely undocumented __builtin_dwarf_cfa(), which appears to
1317  really compute the CFA, whereas __builtin_frame_address(0) claims
1318  to but actually doesn't. See
1319  https://bugs.kde.org/show_bug.cgi?id=243270#c47
1320 */
# if defined( __GNUC__ ) && defined( __GCC_HAVE_DWARF2_CFI_ASM )
/* Pass the caller's CFA (obtained via __builtin_dwarf_cfa()) as an
   extra asm input operand; stash %rbp in %r15 for the duration of the
   asm and point %rbp at the CFA, with .cfi directives so the unwinder
   can still locate the frame during the hidden call.  See the long
   explanatory comment above. */
# define __FRAME_POINTER , "r"( __builtin_dwarf_cfa() )
# define VALGRIND_CFI_PROLOGUE \
  "movq %%rbp, %%r15\n\t" \
  "movq %2, %%rbp\n\t" \
  ".cfi_remember_state\n\t" \
  ".cfi_def_cfa rbp, 0\n\t"
# define VALGRIND_CFI_EPILOGUE \
  "movq %%r15, %%rbp\n\t" \
  ".cfi_restore_state\n\t"
# else
/* No DWARF CFI asm support (older gcc, or Darwin): all three expand
   to nothing. */
# define __FRAME_POINTER
# define VALGRIND_CFI_PROLOGUE
# define VALGRIND_CFI_EPILOGUE
# endif
1336 
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save %rsp in %r14 (every user below lists "r14" as trashed), then
   round %rsp down to a 16-byte boundary. */
# define VALGRIND_ALIGN_STACK \
  "movq %%rsp,%%r14\n\t" \
  "andq $0xfffffffffffffff0,%%rsp\n\t"
# define VALGRIND_RESTORE_STACK "movq %%r14,%%rsp\n\t"
1346 
1347 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1348  long) == 8. */
1349 
1350 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1351  macros. In order not to trash the stack redzone, we need to drop
1352  %rsp by 128 before the hidden call, and restore afterwards. The
1353  nastyness is that it is only by luck that the stack still appears
1354  to be unwindable during the hidden call - since then the behaviour
1355  of any routine using this macro does not match what the CFI data
1356  says. Sigh.
1357 
1358  Why is this important? Imagine that a wrapper has a stack
1359  allocated local, and passes to the hidden call, a pointer to it.
1360  Because gcc does not know about the hidden call, it may allocate
1361  that local in the redzone. Unfortunately the hidden call may then
1362  trash it before it comes to use it. So we must step clear of the
1363  redzone, for the duration of the hidden call, to make it safe.
1364 
1365  Probably the same problem afflicts the other redzone-style ABIs too
1366  (ppc64-linux); but for those, the stack is
1367  self describing (none of this CFI nonsense) so at least messing
1368  with the stack pointer doesn't give a danger of non-unwindable
1369  stack. */
1370 
1371 # define CALL_FN_W_v( lval, orig ) \
1372  do { \
1373  volatile OrigFn _orig = ( orig ); \
1374  volatile unsigned long _argvec[1]; \
1375  volatile unsigned long _res; \
1376  _argvec[0] = (unsigned long)_orig.nraddr; \
1377  __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1378  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1379  VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1380  : /*out*/ "=a"( _res ) \
1381  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
1382  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1383  lval = (__typeof__( lval ))_res; \
1384  } while ( 0 )
1385 
1386 # define CALL_FN_W_W( lval, orig, arg1 ) \
1387  do { \
1388  volatile OrigFn _orig = ( orig ); \
1389  volatile unsigned long _argvec[2]; \
1390  volatile unsigned long _res; \
1391  _argvec[0] = (unsigned long)_orig.nraddr; \
1392  _argvec[1] = (unsigned long)( arg1 ); \
1393  __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1394  "movq 8(%%rax), %%rdi\n\t" \
1395  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1396  VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1397  : /*out*/ "=a"( _res ) \
1398  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
1399  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1400  lval = (__typeof__( lval ))_res; \
1401  } while ( 0 )
1402 
1403 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
1404  do { \
1405  volatile OrigFn _orig = ( orig ); \
1406  volatile unsigned long _argvec[3]; \
1407  volatile unsigned long _res; \
1408  _argvec[0] = (unsigned long)_orig.nraddr; \
1409  _argvec[1] = (unsigned long)( arg1 ); \
1410  _argvec[2] = (unsigned long)( arg2 ); \
1411  __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1412  "movq 16(%%rax), %%rsi\n\t" \
1413  "movq 8(%%rax), %%rdi\n\t" \
1414  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1415  VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1416  : /*out*/ "=a"( _res ) \
1417  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
1418  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1419  lval = (__typeof__( lval ))_res; \
1420  } while ( 0 )
1421 
1422 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
1423  do { \
1424  volatile OrigFn _orig = ( orig ); \
1425  volatile unsigned long _argvec[4]; \
1426  volatile unsigned long _res; \
1427  _argvec[0] = (unsigned long)_orig.nraddr; \
1428  _argvec[1] = (unsigned long)( arg1 ); \
1429  _argvec[2] = (unsigned long)( arg2 ); \
1430  _argvec[3] = (unsigned long)( arg3 ); \
1431  __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1432  "movq 24(%%rax), %%rdx\n\t" \
1433  "movq 16(%%rax), %%rsi\n\t" \
1434  "movq 8(%%rax), %%rdi\n\t" \
1435  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1436  VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1437  : /*out*/ "=a"( _res ) \
1438  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
1439  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1440  lval = (__typeof__( lval ))_res; \
1441  } while ( 0 )
1442 
1443 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
1444  do { \
1445  volatile OrigFn _orig = ( orig ); \
1446  volatile unsigned long _argvec[5]; \
1447  volatile unsigned long _res; \
1448  _argvec[0] = (unsigned long)_orig.nraddr; \
1449  _argvec[1] = (unsigned long)( arg1 ); \
1450  _argvec[2] = (unsigned long)( arg2 ); \
1451  _argvec[3] = (unsigned long)( arg3 ); \
1452  _argvec[4] = (unsigned long)( arg4 ); \
1453  __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1454  "movq 32(%%rax), %%rcx\n\t" \
1455  "movq 24(%%rax), %%rdx\n\t" \
1456  "movq 16(%%rax), %%rsi\n\t" \
1457  "movq 8(%%rax), %%rdi\n\t" \
1458  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1459  VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1460  : /*out*/ "=a"( _res ) \
1461  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
1462  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1463  lval = (__typeof__( lval ))_res; \
1464  } while ( 0 )
1465 
1466 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
1467  do { \
1468  volatile OrigFn _orig = ( orig ); \
1469  volatile unsigned long _argvec[6]; \
1470  volatile unsigned long _res; \
1471  _argvec[0] = (unsigned long)_orig.nraddr; \
1472  _argvec[1] = (unsigned long)( arg1 ); \
1473  _argvec[2] = (unsigned long)( arg2 ); \
1474  _argvec[3] = (unsigned long)( arg3 ); \
1475  _argvec[4] = (unsigned long)( arg4 ); \
1476  _argvec[5] = (unsigned long)( arg5 ); \
1477  __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1478  "movq 40(%%rax), %%r8\n\t" \
1479  "movq 32(%%rax), %%rcx\n\t" \
1480  "movq 24(%%rax), %%rdx\n\t" \
1481  "movq 16(%%rax), %%rsi\n\t" \
1482  "movq 8(%%rax), %%rdi\n\t" \
1483  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1484  VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1485  : /*out*/ "=a"( _res ) \
1486  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
1487  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1488  lval = (__typeof__( lval ))_res; \
1489  } while ( 0 )
1490 
1491 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
1492  do { \
1493  volatile OrigFn _orig = ( orig ); \
1494  volatile unsigned long _argvec[7]; \
1495  volatile unsigned long _res; \
1496  _argvec[0] = (unsigned long)_orig.nraddr; \
1497  _argvec[1] = (unsigned long)( arg1 ); \
1498  _argvec[2] = (unsigned long)( arg2 ); \
1499  _argvec[3] = (unsigned long)( arg3 ); \
1500  _argvec[4] = (unsigned long)( arg4 ); \
1501  _argvec[5] = (unsigned long)( arg5 ); \
1502  _argvec[6] = (unsigned long)( arg6 ); \
1503  __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1504  "movq 48(%%rax), %%r9\n\t" \
1505  "movq 40(%%rax), %%r8\n\t" \
1506  "movq 32(%%rax), %%rcx\n\t" \
1507  "movq 24(%%rax), %%rdx\n\t" \
1508  "movq 16(%%rax), %%rsi\n\t" \
1509  "movq 8(%%rax), %%rdi\n\t" \
1510  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1511  VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1512  : /*out*/ "=a"( _res ) \
1513  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
1514  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1515  lval = (__typeof__( lval ))_res; \
1516  } while ( 0 )
1517 
1518 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
1519  do { \
1520  volatile OrigFn _orig = ( orig ); \
1521  volatile unsigned long _argvec[8]; \
1522  volatile unsigned long _res; \
1523  _argvec[0] = (unsigned long)_orig.nraddr; \
1524  _argvec[1] = (unsigned long)( arg1 ); \
1525  _argvec[2] = (unsigned long)( arg2 ); \
1526  _argvec[3] = (unsigned long)( arg3 ); \
1527  _argvec[4] = (unsigned long)( arg4 ); \
1528  _argvec[5] = (unsigned long)( arg5 ); \
1529  _argvec[6] = (unsigned long)( arg6 ); \
1530  _argvec[7] = (unsigned long)( arg7 ); \
1531  __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $136,%%rsp\n\t" \
1532  "pushq 56(%%rax)\n\t" \
1533  "movq 48(%%rax), %%r9\n\t" \
1534  "movq 40(%%rax), %%r8\n\t" \
1535  "movq 32(%%rax), %%rcx\n\t" \
1536  "movq 24(%%rax), %%rdx\n\t" \
1537  "movq 16(%%rax), %%rsi\n\t" \
1538  "movq 8(%%rax), %%rdi\n\t" \
1539  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1540  VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1541  : /*out*/ "=a"( _res ) \
1542  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
1543  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1544  lval = (__typeof__( lval ))_res; \
1545  } while ( 0 )
1546 
1547 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
1548  do { \
1549  volatile OrigFn _orig = ( orig ); \
1550  volatile unsigned long _argvec[9]; \
1551  volatile unsigned long _res; \
1552  _argvec[0] = (unsigned long)_orig.nraddr; \
1553  _argvec[1] = (unsigned long)( arg1 ); \
1554  _argvec[2] = (unsigned long)( arg2 ); \
1555  _argvec[3] = (unsigned long)( arg3 ); \
1556  _argvec[4] = (unsigned long)( arg4 ); \
1557  _argvec[5] = (unsigned long)( arg5 ); \
1558  _argvec[6] = (unsigned long)( arg6 ); \
1559  _argvec[7] = (unsigned long)( arg7 ); \
1560  _argvec[8] = (unsigned long)( arg8 ); \
1561  __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1562  "pushq 64(%%rax)\n\t" \
1563  "pushq 56(%%rax)\n\t" \
1564  "movq 48(%%rax), %%r9\n\t" \
1565  "movq 40(%%rax), %%r8\n\t" \
1566  "movq 32(%%rax), %%rcx\n\t" \
1567  "movq 24(%%rax), %%rdx\n\t" \
1568  "movq 16(%%rax), %%rsi\n\t" \
1569  "movq 8(%%rax), %%rdi\n\t" \
1570  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1571  VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1572  : /*out*/ "=a"( _res ) \
1573  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
1574  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1575  lval = (__typeof__( lval ))_res; \
1576  } while ( 0 )
1577 
/* amd64: 9-word-arg variant. args7-9 go on the stack (3 pushes = odd,
   hence "subq $136" = 128+8 to end up 16-byte aligned at the call). */
1578 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
1579  do { \
1580  volatile OrigFn _orig = ( orig ); \
1581  volatile unsigned long _argvec[10]; \
1582  volatile unsigned long _res; \
1583  _argvec[0] = (unsigned long)_orig.nraddr; \
1584  _argvec[1] = (unsigned long)( arg1 ); \
1585  _argvec[2] = (unsigned long)( arg2 ); \
1586  _argvec[3] = (unsigned long)( arg3 ); \
1587  _argvec[4] = (unsigned long)( arg4 ); \
1588  _argvec[5] = (unsigned long)( arg5 ); \
1589  _argvec[6] = (unsigned long)( arg6 ); \
1590  _argvec[7] = (unsigned long)( arg7 ); \
1591  _argvec[8] = (unsigned long)( arg8 ); \
1592  _argvec[9] = (unsigned long)( arg9 ); \
1593  __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $136,%%rsp\n\t" \
1594  "pushq 72(%%rax)\n\t" \
1595  "pushq 64(%%rax)\n\t" \
1596  "pushq 56(%%rax)\n\t" \
1597  "movq 48(%%rax), %%r9\n\t" \
1598  "movq 40(%%rax), %%r8\n\t" \
1599  "movq 32(%%rax), %%rcx\n\t" \
1600  "movq 24(%%rax), %%rdx\n\t" \
1601  "movq 16(%%rax), %%rsi\n\t" \
1602  "movq 8(%%rax), %%rdi\n\t" \
1603  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1604  VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1605  : /*out*/ "=a"( _res ) \
1606  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
1607  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1608  lval = (__typeof__( lval ))_res; \
1609  } while ( 0 )
1610 
/* amd64: 10-word-arg variant. args7-10 on the stack (4 pushes = even,
   "subq $128" keeps %rsp 16-byte aligned at the call). */
1611 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
1612  do { \
1613  volatile OrigFn _orig = ( orig ); \
1614  volatile unsigned long _argvec[11]; \
1615  volatile unsigned long _res; \
1616  _argvec[0] = (unsigned long)_orig.nraddr; \
1617  _argvec[1] = (unsigned long)( arg1 ); \
1618  _argvec[2] = (unsigned long)( arg2 ); \
1619  _argvec[3] = (unsigned long)( arg3 ); \
1620  _argvec[4] = (unsigned long)( arg4 ); \
1621  _argvec[5] = (unsigned long)( arg5 ); \
1622  _argvec[6] = (unsigned long)( arg6 ); \
1623  _argvec[7] = (unsigned long)( arg7 ); \
1624  _argvec[8] = (unsigned long)( arg8 ); \
1625  _argvec[9] = (unsigned long)( arg9 ); \
1626  _argvec[10] = (unsigned long)( arg10 ); \
1627  __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1628  "pushq 80(%%rax)\n\t" \
1629  "pushq 72(%%rax)\n\t" \
1630  "pushq 64(%%rax)\n\t" \
1631  "pushq 56(%%rax)\n\t" \
1632  "movq 48(%%rax), %%r9\n\t" \
1633  "movq 40(%%rax), %%r8\n\t" \
1634  "movq 32(%%rax), %%rcx\n\t" \
1635  "movq 24(%%rax), %%rdx\n\t" \
1636  "movq 16(%%rax), %%rsi\n\t" \
1637  "movq 8(%%rax), %%rdi\n\t" \
1638  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1639  VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1640  : /*out*/ "=a"( _res ) \
1641  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
1642  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1643  lval = (__typeof__( lval ))_res; \
1644  } while ( 0 )
1645 
/* amd64: 11-word-arg variant. args7-11 on the stack (5 pushes = odd,
   hence "subq $136" to preserve 16-byte stack alignment at the call). */
1646 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
1647  do { \
1648  volatile OrigFn _orig = ( orig ); \
1649  volatile unsigned long _argvec[12]; \
1650  volatile unsigned long _res; \
1651  _argvec[0] = (unsigned long)_orig.nraddr; \
1652  _argvec[1] = (unsigned long)( arg1 ); \
1653  _argvec[2] = (unsigned long)( arg2 ); \
1654  _argvec[3] = (unsigned long)( arg3 ); \
1655  _argvec[4] = (unsigned long)( arg4 ); \
1656  _argvec[5] = (unsigned long)( arg5 ); \
1657  _argvec[6] = (unsigned long)( arg6 ); \
1658  _argvec[7] = (unsigned long)( arg7 ); \
1659  _argvec[8] = (unsigned long)( arg8 ); \
1660  _argvec[9] = (unsigned long)( arg9 ); \
1661  _argvec[10] = (unsigned long)( arg10 ); \
1662  _argvec[11] = (unsigned long)( arg11 ); \
1663  __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $136,%%rsp\n\t" \
1664  "pushq 88(%%rax)\n\t" \
1665  "pushq 80(%%rax)\n\t" \
1666  "pushq 72(%%rax)\n\t" \
1667  "pushq 64(%%rax)\n\t" \
1668  "pushq 56(%%rax)\n\t" \
1669  "movq 48(%%rax), %%r9\n\t" \
1670  "movq 40(%%rax), %%r8\n\t" \
1671  "movq 32(%%rax), %%rcx\n\t" \
1672  "movq 24(%%rax), %%rdx\n\t" \
1673  "movq 16(%%rax), %%rsi\n\t" \
1674  "movq 8(%%rax), %%rdi\n\t" \
1675  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1676  VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1677  : /*out*/ "=a"( _res ) \
1678  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
1679  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1680  lval = (__typeof__( lval ))_res; \
1681  } while ( 0 )
1682 
/* amd64: 12-word-arg variant. args7-12 on the stack (6 pushes = even,
   "subq $128" keeps %rsp 16-byte aligned at the call). */
1683 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
1684  do { \
1685  volatile OrigFn _orig = ( orig ); \
1686  volatile unsigned long _argvec[13]; \
1687  volatile unsigned long _res; \
1688  _argvec[0] = (unsigned long)_orig.nraddr; \
1689  _argvec[1] = (unsigned long)( arg1 ); \
1690  _argvec[2] = (unsigned long)( arg2 ); \
1691  _argvec[3] = (unsigned long)( arg3 ); \
1692  _argvec[4] = (unsigned long)( arg4 ); \
1693  _argvec[5] = (unsigned long)( arg5 ); \
1694  _argvec[6] = (unsigned long)( arg6 ); \
1695  _argvec[7] = (unsigned long)( arg7 ); \
1696  _argvec[8] = (unsigned long)( arg8 ); \
1697  _argvec[9] = (unsigned long)( arg9 ); \
1698  _argvec[10] = (unsigned long)( arg10 ); \
1699  _argvec[11] = (unsigned long)( arg11 ); \
1700  _argvec[12] = (unsigned long)( arg12 ); \
1701  __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1702  "pushq 96(%%rax)\n\t" \
1703  "pushq 88(%%rax)\n\t" \
1704  "pushq 80(%%rax)\n\t" \
1705  "pushq 72(%%rax)\n\t" \
1706  "pushq 64(%%rax)\n\t" \
1707  "pushq 56(%%rax)\n\t" \
1708  "movq 48(%%rax), %%r9\n\t" \
1709  "movq 40(%%rax), %%r8\n\t" \
1710  "movq 32(%%rax), %%rcx\n\t" \
1711  "movq 24(%%rax), %%rdx\n\t" \
1712  "movq 16(%%rax), %%rsi\n\t" \
1713  "movq 8(%%rax), %%rdi\n\t" \
1714  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1715  VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1716  : /*out*/ "=a"( _res ) \
1717  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
1718  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1719  lval = (__typeof__( lval ))_res; \
1720  } while ( 0 )
1721 
1722 # endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
1723 
1724 /* ------------------------ ppc32-linux ------------------------ */
1725 
1726 # if defined( PLAT_ppc32_linux )
1727 
1728 /* This is useful for finding out about the on-stack stuff:
1729 
1730  extern int f9 ( int,int,int,int,int,int,int,int,int );
1731  extern int f10 ( int,int,int,int,int,int,int,int,int,int );
1732  extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
1733  extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
1734 
1735  int g9 ( void ) {
1736  return f9(11,22,33,44,55,66,77,88,99);
1737  }
1738  int g10 ( void ) {
1739  return f10(11,22,33,44,55,66,77,88,99,110);
1740  }
1741  int g11 ( void ) {
1742  return f11(11,22,33,44,55,66,77,88,99,110,121);
1743  }
1744  int g12 ( void ) {
1745  return f12(11,22,33,44,55,66,77,88,99,110,121,132);
1746  }
1747 */
1748 
1749 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
1750 
1751 /* These regs are trashed by the hidden call. */
/* ppc32 clobber list for the hidden call: link/count/condition registers plus
   the volatile GPRs; used verbatim in each asm's trash list below. */
1752 # define __CALLER_SAVED_REGS \
1753  "lr", "ctr", "xer", "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", "r0", "r2", "r3", "r4", "r5", "r6", \
1754  "r7", "r8", "r9", "r10", "r11", "r12", "r13"
1755 
1756 /* Macros to save and align the stack before making a function
1757  call and restore it afterwards as gcc may not keep the stack
1758  pointer aligned if it doesn't realise calls are being made
1759  to other functions. */
1760 
/* ppc32: save r1 (stack pointer) in r28, then clear the low 4 bits of r1
   (rlwinm mask 0..27) to force 16-byte alignment; RESTORE puts r1 back.
   r28 is therefore listed in the trash lists of every CALL_FN_ below. */
1761 # define VALGRIND_ALIGN_STACK \
1762  "mr 28,1\n\t" \
1763  "rlwinm 1,1,0,0,27\n\t"
1764 # define VALGRIND_RESTORE_STACK "mr 1,28\n\t"
1765 
1766 /* These CALL_FN_ macros assume that on ppc32-linux,
1767  sizeof(unsigned long) == 4. */
1768 
/* ppc32: zero-arg call. Target address (argvec[0]) is loaded via r11;
   the word result is returned in r3 and copied out. */
1769 # define CALL_FN_W_v( lval, orig ) \
1770  do { \
1771  volatile OrigFn _orig = ( orig ); \
1772  volatile unsigned long _argvec[1]; \
1773  volatile unsigned long _res; \
1774  _argvec[0] = (unsigned long)_orig.nraddr; \
1775  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1776  "lwz 11,0(11)\n\t" /* target->r11 */ \
1777  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1778  : /*out*/ "=r"( _res ) \
1779  : /*in*/ "r"( &_argvec[0] ) \
1780  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1781  lval = (__typeof__( lval ))_res; \
1782  } while ( 0 )
1783 
/* ppc32: one-arg call; arg1 -> r3, result back in r3. */
1784 # define CALL_FN_W_W( lval, orig, arg1 ) \
1785  do { \
1786  volatile OrigFn _orig = ( orig ); \
1787  volatile unsigned long _argvec[2]; \
1788  volatile unsigned long _res; \
1789  _argvec[0] = (unsigned long)_orig.nraddr; \
1790  _argvec[1] = (unsigned long)arg1; \
1791  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1792  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1793  "lwz 11,0(11)\n\t" /* target->r11 */ \
1794  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1795  : /*out*/ "=r"( _res ) \
1796  : /*in*/ "r"( &_argvec[0] ) \
1797  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1798  lval = (__typeof__( lval ))_res; \
1799  } while ( 0 )
1800 
/* ppc32: two-arg call; args -> r3,r4. */
1801 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
1802  do { \
1803  volatile OrigFn _orig = ( orig ); \
1804  volatile unsigned long _argvec[3]; \
1805  volatile unsigned long _res; \
1806  _argvec[0] = (unsigned long)_orig.nraddr; \
1807  _argvec[1] = (unsigned long)arg1; \
1808  _argvec[2] = (unsigned long)arg2; \
1809  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1810  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1811  "lwz 4,8(11)\n\t" \
1812  "lwz 11,0(11)\n\t" /* target->r11 */ \
1813  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1814  : /*out*/ "=r"( _res ) \
1815  : /*in*/ "r"( &_argvec[0] ) \
1816  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1817  lval = (__typeof__( lval ))_res; \
1818  } while ( 0 )
1819 
/* ppc32: three-arg call; args -> r3,r4,r5. */
1820 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
1821  do { \
1822  volatile OrigFn _orig = ( orig ); \
1823  volatile unsigned long _argvec[4]; \
1824  volatile unsigned long _res; \
1825  _argvec[0] = (unsigned long)_orig.nraddr; \
1826  _argvec[1] = (unsigned long)arg1; \
1827  _argvec[2] = (unsigned long)arg2; \
1828  _argvec[3] = (unsigned long)arg3; \
1829  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1830  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1831  "lwz 4,8(11)\n\t" \
1832  "lwz 5,12(11)\n\t" \
1833  "lwz 11,0(11)\n\t" /* target->r11 */ \
1834  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1835  : /*out*/ "=r"( _res ) \
1836  : /*in*/ "r"( &_argvec[0] ) \
1837  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1838  lval = (__typeof__( lval ))_res; \
1839  } while ( 0 )
1840 
/* ppc32: four-arg call; args -> r3..r6. */
1841 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
1842  do { \
1843  volatile OrigFn _orig = ( orig ); \
1844  volatile unsigned long _argvec[5]; \
1845  volatile unsigned long _res; \
1846  _argvec[0] = (unsigned long)_orig.nraddr; \
1847  _argvec[1] = (unsigned long)arg1; \
1848  _argvec[2] = (unsigned long)arg2; \
1849  _argvec[3] = (unsigned long)arg3; \
1850  _argvec[4] = (unsigned long)arg4; \
1851  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1852  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1853  "lwz 4,8(11)\n\t" \
1854  "lwz 5,12(11)\n\t" \
1855  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1856  "lwz 11,0(11)\n\t" /* target->r11 */ \
1857  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1858  : /*out*/ "=r"( _res ) \
1859  : /*in*/ "r"( &_argvec[0] ) \
1860  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1861  lval = (__typeof__( lval ))_res; \
1862  } while ( 0 )
1863 
/* ppc32: five-arg call; args -> r3..r7. */
1864 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
1865  do { \
1866  volatile OrigFn _orig = ( orig ); \
1867  volatile unsigned long _argvec[6]; \
1868  volatile unsigned long _res; \
1869  _argvec[0] = (unsigned long)_orig.nraddr; \
1870  _argvec[1] = (unsigned long)arg1; \
1871  _argvec[2] = (unsigned long)arg2; \
1872  _argvec[3] = (unsigned long)arg3; \
1873  _argvec[4] = (unsigned long)arg4; \
1874  _argvec[5] = (unsigned long)arg5; \
1875  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1876  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1877  "lwz 4,8(11)\n\t" \
1878  "lwz 5,12(11)\n\t" \
1879  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1880  "lwz 7,20(11)\n\t" \
1881  "lwz 11,0(11)\n\t" /* target->r11 */ \
1882  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1883  : /*out*/ "=r"( _res ) \
1884  : /*in*/ "r"( &_argvec[0] ) \
1885  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1886  lval = (__typeof__( lval ))_res; \
1887  } while ( 0 )
1888 
/* ppc32: six-arg call; args -> r3..r8. */
1889 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
1890  do { \
1891  volatile OrigFn _orig = ( orig ); \
1892  volatile unsigned long _argvec[7]; \
1893  volatile unsigned long _res; \
1894  _argvec[0] = (unsigned long)_orig.nraddr; \
1895  _argvec[1] = (unsigned long)arg1; \
1896  _argvec[2] = (unsigned long)arg2; \
1897  _argvec[3] = (unsigned long)arg3; \
1898  _argvec[4] = (unsigned long)arg4; \
1899  _argvec[5] = (unsigned long)arg5; \
1900  _argvec[6] = (unsigned long)arg6; \
1901  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1902  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1903  "lwz 4,8(11)\n\t" \
1904  "lwz 5,12(11)\n\t" \
1905  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1906  "lwz 7,20(11)\n\t" \
1907  "lwz 8,24(11)\n\t" \
1908  "lwz 11,0(11)\n\t" /* target->r11 */ \
1909  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1910  : /*out*/ "=r"( _res ) \
1911  : /*in*/ "r"( &_argvec[0] ) \
1912  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1913  lval = (__typeof__( lval ))_res; \
1914  } while ( 0 )
1915 
/* ppc32: seven-arg call; args -> r3..r9. */
1916 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
1917  do { \
1918  volatile OrigFn _orig = ( orig ); \
1919  volatile unsigned long _argvec[8]; \
1920  volatile unsigned long _res; \
1921  _argvec[0] = (unsigned long)_orig.nraddr; \
1922  _argvec[1] = (unsigned long)arg1; \
1923  _argvec[2] = (unsigned long)arg2; \
1924  _argvec[3] = (unsigned long)arg3; \
1925  _argvec[4] = (unsigned long)arg4; \
1926  _argvec[5] = (unsigned long)arg5; \
1927  _argvec[6] = (unsigned long)arg6; \
1928  _argvec[7] = (unsigned long)arg7; \
1929  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1930  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1931  "lwz 4,8(11)\n\t" \
1932  "lwz 5,12(11)\n\t" \
1933  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1934  "lwz 7,20(11)\n\t" \
1935  "lwz 8,24(11)\n\t" \
1936  "lwz 9,28(11)\n\t" \
1937  "lwz 11,0(11)\n\t" /* target->r11 */ \
1938  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1939  : /*out*/ "=r"( _res ) \
1940  : /*in*/ "r"( &_argvec[0] ) \
1941  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1942  lval = (__typeof__( lval ))_res; \
1943  } while ( 0 )
1944 
/* ppc32: eight-arg call; args fill all argument registers r3..r10. */
1945 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
1946  do { \
1947  volatile OrigFn _orig = ( orig ); \
1948  volatile unsigned long _argvec[9]; \
1949  volatile unsigned long _res; \
1950  _argvec[0] = (unsigned long)_orig.nraddr; \
1951  _argvec[1] = (unsigned long)arg1; \
1952  _argvec[2] = (unsigned long)arg2; \
1953  _argvec[3] = (unsigned long)arg3; \
1954  _argvec[4] = (unsigned long)arg4; \
1955  _argvec[5] = (unsigned long)arg5; \
1956  _argvec[6] = (unsigned long)arg6; \
1957  _argvec[7] = (unsigned long)arg7; \
1958  _argvec[8] = (unsigned long)arg8; \
1959  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1960  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1961  "lwz 4,8(11)\n\t" \
1962  "lwz 5,12(11)\n\t" \
1963  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1964  "lwz 7,20(11)\n\t" \
1965  "lwz 8,24(11)\n\t" \
1966  "lwz 9,28(11)\n\t" \
1967  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
1968  "lwz 11,0(11)\n\t" /* target->r11 */ \
1969  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1970  : /*out*/ "=r"( _res ) \
1971  : /*in*/ "r"( &_argvec[0] ) \
1972  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1973  lval = (__typeof__( lval ))_res; \
1974  } while ( 0 )
1975 
/* ppc32: nine-arg call. args1-8 -> r3..r10; arg9 is spilled to 8(r1) after
   dropping the stack by 16 (r3 is used as scratch for the spill, then
   reloaded with arg1). */
1976 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
1977  do { \
1978  volatile OrigFn _orig = ( orig ); \
1979  volatile unsigned long _argvec[10]; \
1980  volatile unsigned long _res; \
1981  _argvec[0] = (unsigned long)_orig.nraddr; \
1982  _argvec[1] = (unsigned long)arg1; \
1983  _argvec[2] = (unsigned long)arg2; \
1984  _argvec[3] = (unsigned long)arg3; \
1985  _argvec[4] = (unsigned long)arg4; \
1986  _argvec[5] = (unsigned long)arg5; \
1987  _argvec[6] = (unsigned long)arg6; \
1988  _argvec[7] = (unsigned long)arg7; \
1989  _argvec[8] = (unsigned long)arg8; \
1990  _argvec[9] = (unsigned long)arg9; \
1991  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1992  "addi 1,1,-16\n\t" /* arg9 */ \
1993  "lwz 3,36(11)\n\t" \
1994  "stw 3,8(1)\n\t" /* args1-8 */ \
1995  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1996  "lwz 4,8(11)\n\t" \
1997  "lwz 5,12(11)\n\t" \
1998  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1999  "lwz 7,20(11)\n\t" \
2000  "lwz 8,24(11)\n\t" \
2001  "lwz 9,28(11)\n\t" \
2002  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2003  "lwz 11,0(11)\n\t" /* target->r11 */ \
2004  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
2005  : /*out*/ "=r"( _res ) \
2006  : /*in*/ "r"( &_argvec[0] ) \
2007  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2008  lval = (__typeof__( lval ))_res; \
2009  } while ( 0 )
2010 
/* ppc32: ten-arg call. args1-8 -> r3..r10; arg9 -> 8(r1), arg10 -> 12(r1)
   after dropping the stack by 16. */
2011 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
2012  do { \
2013  volatile OrigFn _orig = ( orig ); \
2014  volatile unsigned long _argvec[11]; \
2015  volatile unsigned long _res; \
2016  _argvec[0] = (unsigned long)_orig.nraddr; \
2017  _argvec[1] = (unsigned long)arg1; \
2018  _argvec[2] = (unsigned long)arg2; \
2019  _argvec[3] = (unsigned long)arg3; \
2020  _argvec[4] = (unsigned long)arg4; \
2021  _argvec[5] = (unsigned long)arg5; \
2022  _argvec[6] = (unsigned long)arg6; \
2023  _argvec[7] = (unsigned long)arg7; \
2024  _argvec[8] = (unsigned long)arg8; \
2025  _argvec[9] = (unsigned long)arg9; \
2026  _argvec[10] = (unsigned long)arg10; \
2027  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2028  "addi 1,1,-16\n\t" /* arg10 */ \
2029  "lwz 3,40(11)\n\t" \
2030  "stw 3,12(1)\n\t" /* arg9 */ \
2031  "lwz 3,36(11)\n\t" \
2032  "stw 3,8(1)\n\t" /* args1-8 */ \
2033  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2034  "lwz 4,8(11)\n\t" \
2035  "lwz 5,12(11)\n\t" \
2036  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2037  "lwz 7,20(11)\n\t" \
2038  "lwz 8,24(11)\n\t" \
2039  "lwz 9,28(11)\n\t" \
2040  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2041  "lwz 11,0(11)\n\t" /* target->r11 */ \
2042  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
2043  : /*out*/ "=r"( _res ) \
2044  : /*in*/ "r"( &_argvec[0] ) \
2045  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2046  lval = (__typeof__( lval ))_res; \
2047  } while ( 0 )
2048 
/* ppc32: eleven-arg call. args1-8 -> r3..r10; args9-11 spilled to
   8/12/16(r1) after dropping the stack by 32 (keeps 16-byte alignment). */
2049 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
2050  do { \
2051  volatile OrigFn _orig = ( orig ); \
2052  volatile unsigned long _argvec[12]; \
2053  volatile unsigned long _res; \
2054  _argvec[0] = (unsigned long)_orig.nraddr; \
2055  _argvec[1] = (unsigned long)arg1; \
2056  _argvec[2] = (unsigned long)arg2; \
2057  _argvec[3] = (unsigned long)arg3; \
2058  _argvec[4] = (unsigned long)arg4; \
2059  _argvec[5] = (unsigned long)arg5; \
2060  _argvec[6] = (unsigned long)arg6; \
2061  _argvec[7] = (unsigned long)arg7; \
2062  _argvec[8] = (unsigned long)arg8; \
2063  _argvec[9] = (unsigned long)arg9; \
2064  _argvec[10] = (unsigned long)arg10; \
2065  _argvec[11] = (unsigned long)arg11; \
2066  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2067  "addi 1,1,-32\n\t" /* arg11 */ \
2068  "lwz 3,44(11)\n\t" \
2069  "stw 3,16(1)\n\t" /* arg10 */ \
2070  "lwz 3,40(11)\n\t" \
2071  "stw 3,12(1)\n\t" /* arg9 */ \
2072  "lwz 3,36(11)\n\t" \
2073  "stw 3,8(1)\n\t" /* args1-8 */ \
2074  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2075  "lwz 4,8(11)\n\t" \
2076  "lwz 5,12(11)\n\t" \
2077  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2078  "lwz 7,20(11)\n\t" \
2079  "lwz 8,24(11)\n\t" \
2080  "lwz 9,28(11)\n\t" \
2081  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2082  "lwz 11,0(11)\n\t" /* target->r11 */ \
2083  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
2084  : /*out*/ "=r"( _res ) \
2085  : /*in*/ "r"( &_argvec[0] ) \
2086  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2087  lval = (__typeof__( lval ))_res; \
2088  } while ( 0 )
2089 
/* ppc32: twelve-arg call. args1-8 -> r3..r10; args9-12 spilled to
   8/12/16/20(r1) after dropping the stack by 32. */
2090 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
2091  do { \
2092  volatile OrigFn _orig = ( orig ); \
2093  volatile unsigned long _argvec[13]; \
2094  volatile unsigned long _res; \
2095  _argvec[0] = (unsigned long)_orig.nraddr; \
2096  _argvec[1] = (unsigned long)arg1; \
2097  _argvec[2] = (unsigned long)arg2; \
2098  _argvec[3] = (unsigned long)arg3; \
2099  _argvec[4] = (unsigned long)arg4; \
2100  _argvec[5] = (unsigned long)arg5; \
2101  _argvec[6] = (unsigned long)arg6; \
2102  _argvec[7] = (unsigned long)arg7; \
2103  _argvec[8] = (unsigned long)arg8; \
2104  _argvec[9] = (unsigned long)arg9; \
2105  _argvec[10] = (unsigned long)arg10; \
2106  _argvec[11] = (unsigned long)arg11; \
2107  _argvec[12] = (unsigned long)arg12; \
2108  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2109  "addi 1,1,-32\n\t" /* arg12 */ \
2110  "lwz 3,48(11)\n\t" \
2111  "stw 3,20(1)\n\t" /* arg11 */ \
2112  "lwz 3,44(11)\n\t" \
2113  "stw 3,16(1)\n\t" /* arg10 */ \
2114  "lwz 3,40(11)\n\t" \
2115  "stw 3,12(1)\n\t" /* arg9 */ \
2116  "lwz 3,36(11)\n\t" \
2117  "stw 3,8(1)\n\t" /* args1-8 */ \
2118  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2119  "lwz 4,8(11)\n\t" \
2120  "lwz 5,12(11)\n\t" \
2121  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2122  "lwz 7,20(11)\n\t" \
2123  "lwz 8,24(11)\n\t" \
2124  "lwz 9,28(11)\n\t" \
2125  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2126  "lwz 11,0(11)\n\t" /* target->r11 */ \
2127  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
2128  : /*out*/ "=r"( _res ) \
2129  : /*in*/ "r"( &_argvec[0] ) \
2130  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2131  lval = (__typeof__( lval ))_res; \
2132  } while ( 0 )
2133 
2134 # endif /* PLAT_ppc32_linux */
2135 
2136 /* ------------------------ ppc64-linux ------------------------ */
2137 
2138 # if defined( PLAT_ppc64_linux )
2139 
2140 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2141 
2142 /* These regs are trashed by the hidden call. */
/* ppc64 clobber list for the hidden call — same volatile register set as
   the ppc32 variant above. */
2143 # define __CALLER_SAVED_REGS \
2144  "lr", "ctr", "xer", "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", "r0", "r2", "r3", "r4", "r5", "r6", \
2145  "r7", "r8", "r9", "r10", "r11", "r12", "r13"
2146 
2147 /* Macros to save and align the stack before making a function
2148  call and restore it afterwards as gcc may not keep the stack
2149  pointer aligned if it doesn't realise calls are being made
2150  to other functions. */
2151 
/* ppc64: save r1 in r28, then clear the low 4 bits (rldicr keeps bits
   0..59) to force 16-byte alignment; RESTORE copies r28 back to r1. */
2152 # define VALGRIND_ALIGN_STACK \
2153  "mr 28,1\n\t" \
2154  "rldicr 1,1,0,59\n\t"
2155 # define VALGRIND_RESTORE_STACK "mr 1,28\n\t"
2156 
2157 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2158  long) == 8. */
2159 
/* ppc64: zero-arg call. The caller's TOC pointer (r2) is saved at -16(r11)
   and the callee's TOC (stored just below nraddr) is installed before the
   call, then restored afterwards; result returned in r3. */
2160 # define CALL_FN_W_v( lval, orig ) \
2161  do { \
2162  volatile OrigFn _orig = ( orig ); \
2163  volatile unsigned long _argvec[3 + 0]; \
2164  volatile unsigned long _res; \
2165  /* _argvec[0] holds current r2 across the call */ \
2166  _argvec[1] = (unsigned long)_orig.r2; \
2167  _argvec[2] = (unsigned long)_orig.nraddr; \
2168  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2169  "std 2,-16(11)\n\t" /* save tocptr */ \
2170  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2171  "ld 11, 0(11)\n\t" /* target->r11 */ \
2172  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2173  "mr %0,3\n\t" \
2174  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2175  VALGRIND_RESTORE_STACK \
2176  : /*out*/ "=r"( _res ) \
2177  : /*in*/ "r"( &_argvec[2] ) \
2178  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2179  lval = (__typeof__( lval ))_res; \
2180  } while ( 0 )
2181 
/* ppc64: one-arg call with TOC save/switch/restore; arg1 -> r3. */
2182 # define CALL_FN_W_W( lval, orig, arg1 ) \
2183  do { \
2184  volatile OrigFn _orig = ( orig ); \
2185  volatile unsigned long _argvec[3 + 1]; \
2186  volatile unsigned long _res; \
2187  /* _argvec[0] holds current r2 across the call */ \
2188  _argvec[1] = (unsigned long)_orig.r2; \
2189  _argvec[2] = (unsigned long)_orig.nraddr; \
2190  _argvec[2 + 1] = (unsigned long)arg1; \
2191  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2192  "std 2,-16(11)\n\t" /* save tocptr */ \
2193  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2194  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2195  "ld 11, 0(11)\n\t" /* target->r11 */ \
2196  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2197  "mr %0,3\n\t" \
2198  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2199  VALGRIND_RESTORE_STACK \
2200  : /*out*/ "=r"( _res ) \
2201  : /*in*/ "r"( &_argvec[2] ) \
2202  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2203  lval = (__typeof__( lval ))_res; \
2204  } while ( 0 )
2205 
/* ppc64: two-arg call with TOC save/switch/restore; args -> r3,r4. */
2206 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
2207  do { \
2208  volatile OrigFn _orig = ( orig ); \
2209  volatile unsigned long _argvec[3 + 2]; \
2210  volatile unsigned long _res; \
2211  /* _argvec[0] holds current r2 across the call */ \
2212  _argvec[1] = (unsigned long)_orig.r2; \
2213  _argvec[2] = (unsigned long)_orig.nraddr; \
2214  _argvec[2 + 1] = (unsigned long)arg1; \
2215  _argvec[2 + 2] = (unsigned long)arg2; \
2216  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2217  "std 2,-16(11)\n\t" /* save tocptr */ \
2218  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2219  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2220  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2221  "ld 11, 0(11)\n\t" /* target->r11 */ \
2222  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2223  "mr %0,3\n\t" \
2224  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2225  VALGRIND_RESTORE_STACK \
2226  : /*out*/ "=r"( _res ) \
2227  : /*in*/ "r"( &_argvec[2] ) \
2228  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2229  lval = (__typeof__( lval ))_res; \
2230  } while ( 0 )
2231 
/* ppc64: three-arg call with TOC save/switch/restore; args -> r3..r5. */
2232 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
2233  do { \
2234  volatile OrigFn _orig = ( orig ); \
2235  volatile unsigned long _argvec[3 + 3]; \
2236  volatile unsigned long _res; \
2237  /* _argvec[0] holds current r2 across the call */ \
2238  _argvec[1] = (unsigned long)_orig.r2; \
2239  _argvec[2] = (unsigned long)_orig.nraddr; \
2240  _argvec[2 + 1] = (unsigned long)arg1; \
2241  _argvec[2 + 2] = (unsigned long)arg2; \
2242  _argvec[2 + 3] = (unsigned long)arg3; \
2243  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2244  "std 2,-16(11)\n\t" /* save tocptr */ \
2245  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2246  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2247  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2248  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2249  "ld 11, 0(11)\n\t" /* target->r11 */ \
2250  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2251  "mr %0,3\n\t" \
2252  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2253  VALGRIND_RESTORE_STACK \
2254  : /*out*/ "=r"( _res ) \
2255  : /*in*/ "r"( &_argvec[2] ) \
2256  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2257  lval = (__typeof__( lval ))_res; \
2258  } while ( 0 )
2259 
/* ppc64: four-arg call with TOC save/switch/restore; args -> r3..r6. */
2260 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
2261  do { \
2262  volatile OrigFn _orig = ( orig ); \
2263  volatile unsigned long _argvec[3 + 4]; \
2264  volatile unsigned long _res; \
2265  /* _argvec[0] holds current r2 across the call */ \
2266  _argvec[1] = (unsigned long)_orig.r2; \
2267  _argvec[2] = (unsigned long)_orig.nraddr; \
2268  _argvec[2 + 1] = (unsigned long)arg1; \
2269  _argvec[2 + 2] = (unsigned long)arg2; \
2270  _argvec[2 + 3] = (unsigned long)arg3; \
2271  _argvec[2 + 4] = (unsigned long)arg4; \
2272  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2273  "std 2,-16(11)\n\t" /* save tocptr */ \
2274  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2275  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2276  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2277  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2278  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2279  "ld 11, 0(11)\n\t" /* target->r11 */ \
2280  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2281  "mr %0,3\n\t" \
2282  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2283  VALGRIND_RESTORE_STACK \
2284  : /*out*/ "=r"( _res ) \
2285  : /*in*/ "r"( &_argvec[2] ) \
2286  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2287  lval = (__typeof__( lval ))_res; \
2288  } while ( 0 )
2289 
/* ppc64: five-arg call with TOC save/switch/restore; args -> r3..r7. */
2290 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
2291  do { \
2292  volatile OrigFn _orig = ( orig ); \
2293  volatile unsigned long _argvec[3 + 5]; \
2294  volatile unsigned long _res; \
2295  /* _argvec[0] holds current r2 across the call */ \
2296  _argvec[1] = (unsigned long)_orig.r2; \
2297  _argvec[2] = (unsigned long)_orig.nraddr; \
2298  _argvec[2 + 1] = (unsigned long)arg1; \
2299  _argvec[2 + 2] = (unsigned long)arg2; \
2300  _argvec[2 + 3] = (unsigned long)arg3; \
2301  _argvec[2 + 4] = (unsigned long)arg4; \
2302  _argvec[2 + 5] = (unsigned long)arg5; \
2303  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2304  "std 2,-16(11)\n\t" /* save tocptr */ \
2305  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2306  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2307  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2308  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2309  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2310  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2311  "ld 11, 0(11)\n\t" /* target->r11 */ \
2312  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2313  "mr %0,3\n\t" \
2314  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2315  VALGRIND_RESTORE_STACK \
2316  : /*out*/ "=r"( _res ) \
2317  : /*in*/ "r"( &_argvec[2] ) \
2318  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2319  lval = (__typeof__( lval ))_res; \
2320  } while ( 0 )
2321 
/* ppc64: six-arg call with TOC save/switch/restore; args -> r3..r8. */
2322 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
2323  do { \
2324  volatile OrigFn _orig = ( orig ); \
2325  volatile unsigned long _argvec[3 + 6]; \
2326  volatile unsigned long _res; \
2327  /* _argvec[0] holds current r2 across the call */ \
2328  _argvec[1] = (unsigned long)_orig.r2; \
2329  _argvec[2] = (unsigned long)_orig.nraddr; \
2330  _argvec[2 + 1] = (unsigned long)arg1; \
2331  _argvec[2 + 2] = (unsigned long)arg2; \
2332  _argvec[2 + 3] = (unsigned long)arg3; \
2333  _argvec[2 + 4] = (unsigned long)arg4; \
2334  _argvec[2 + 5] = (unsigned long)arg5; \
2335  _argvec[2 + 6] = (unsigned long)arg6; \
2336  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2337  "std 2,-16(11)\n\t" /* save tocptr */ \
2338  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2339  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2340  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2341  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2342  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2343  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2344  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2345  "ld 11, 0(11)\n\t" /* target->r11 */ \
2346  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2347  "mr %0,3\n\t" \
2348  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2349  VALGRIND_RESTORE_STACK \
2350  : /*out*/ "=r"( _res ) \
2351  : /*in*/ "r"( &_argvec[2] ) \
2352  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2353  lval = (__typeof__( lval ))_res; \
2354  } while ( 0 )
2355 
/* ppc64-linux: as CALL_FN_W_6W, but with 7 args; args 1-7 fit in the
   parameter registers r3-r9, so no stack spill is needed. */
2356 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
2357  do { \
2358  volatile OrigFn _orig = ( orig ); \
2359  volatile unsigned long _argvec[3 + 7]; \
2360  volatile unsigned long _res; \
2361  /* _argvec[0] holds current r2 across the call */ \
2362  _argvec[1] = (unsigned long)_orig.r2; \
2363  _argvec[2] = (unsigned long)_orig.nraddr; \
2364  _argvec[2 + 1] = (unsigned long)arg1; \
2365  _argvec[2 + 2] = (unsigned long)arg2; \
2366  _argvec[2 + 3] = (unsigned long)arg3; \
2367  _argvec[2 + 4] = (unsigned long)arg4; \
2368  _argvec[2 + 5] = (unsigned long)arg5; \
2369  _argvec[2 + 6] = (unsigned long)arg6; \
2370  _argvec[2 + 7] = (unsigned long)arg7; \
2371  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2372  "std 2,-16(11)\n\t" /* save tocptr */ \
2373  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2374  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2375  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2376  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2377  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2378  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2379  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2380  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2381  "ld 11, 0(11)\n\t" /* target->r11 */ \
2382  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2383  "mr %0,3\n\t" \
2384  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2385  VALGRIND_RESTORE_STACK \
2386  : /*out*/ "=r"( _res ) \
2387  : /*in*/ "r"( &_argvec[2] ) \
2388  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2389  lval = (__typeof__( lval ))_res; \
2390  } while ( 0 )
2391 
/* ppc64-linux: as CALL_FN_W_6W, but with 8 args; args 1-8 exactly fill
   the parameter registers r3-r10, so no stack spill is needed. */
2392 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
2393  do { \
2394  volatile OrigFn _orig = ( orig ); \
2395  volatile unsigned long _argvec[3 + 8]; \
2396  volatile unsigned long _res; \
2397  /* _argvec[0] holds current r2 across the call */ \
2398  _argvec[1] = (unsigned long)_orig.r2; \
2399  _argvec[2] = (unsigned long)_orig.nraddr; \
2400  _argvec[2 + 1] = (unsigned long)arg1; \
2401  _argvec[2 + 2] = (unsigned long)arg2; \
2402  _argvec[2 + 3] = (unsigned long)arg3; \
2403  _argvec[2 + 4] = (unsigned long)arg4; \
2404  _argvec[2 + 5] = (unsigned long)arg5; \
2405  _argvec[2 + 6] = (unsigned long)arg6; \
2406  _argvec[2 + 7] = (unsigned long)arg7; \
2407  _argvec[2 + 8] = (unsigned long)arg8; \
2408  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2409  "std 2,-16(11)\n\t" /* save tocptr */ \
2410  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2411  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2412  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2413  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2414  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2415  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2416  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2417  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2418  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2419  "ld 11, 0(11)\n\t" /* target->r11 */ \
2420  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2421  "mr %0,3\n\t" \
2422  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2423  VALGRIND_RESTORE_STACK \
2424  : /*out*/ "=r"( _res ) \
2425  : /*in*/ "r"( &_argvec[2] ) \
2426  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2427  lval = (__typeof__( lval ))_res; \
2428  } while ( 0 )
2429 
/* ppc64-linux: 9-arg variant.  Args 1-8 go in r3-r10 as before; the
   frame is grown by 128 bytes and arg9 is copied to 112(1), the first
   stack-parameter slot. */
2430 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
2431  do { \
2432  volatile OrigFn _orig = ( orig ); \
2433  volatile unsigned long _argvec[3 + 9]; \
2434  volatile unsigned long _res; \
2435  /* _argvec[0] holds current r2 across the call */ \
2436  _argvec[1] = (unsigned long)_orig.r2; \
2437  _argvec[2] = (unsigned long)_orig.nraddr; \
2438  _argvec[2 + 1] = (unsigned long)arg1; \
2439  _argvec[2 + 2] = (unsigned long)arg2; \
2440  _argvec[2 + 3] = (unsigned long)arg3; \
2441  _argvec[2 + 4] = (unsigned long)arg4; \
2442  _argvec[2 + 5] = (unsigned long)arg5; \
2443  _argvec[2 + 6] = (unsigned long)arg6; \
2444  _argvec[2 + 7] = (unsigned long)arg7; \
2445  _argvec[2 + 8] = (unsigned long)arg8; \
2446  _argvec[2 + 9] = (unsigned long)arg9; \
2447  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2448  "std 2,-16(11)\n\t" /* save tocptr */ \
2449  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2450  "addi 1,1,-128\n\t" /* expand stack frame */ /* arg9 */ \
2451  "ld 3,72(11)\n\t" \
2452  "std 3,112(1)\n\t" /* args1-8 */ \
2453  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2454  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2455  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2456  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2457  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2458  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2459  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2460  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2461  "ld 11, 0(11)\n\t" /* target->r11 */ \
2462  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2463  "mr %0,3\n\t" \
2464  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2465  VALGRIND_RESTORE_STACK \
2466  : /*out*/ "=r"( _res ) \
2467  : /*in*/ "r"( &_argvec[2] ) \
2468  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2469  lval = (__typeof__( lval ))_res; \
2470  } while ( 0 )
2471 
/* ppc64-linux: 10-arg variant.  Args 1-8 in r3-r10; frame grown by 128
   bytes, arg9 copied to 112(1) and arg10 to 120(1) via r3 as scratch. */
2472 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
2473  do { \
2474  volatile OrigFn _orig = ( orig ); \
2475  volatile unsigned long _argvec[3 + 10]; \
2476  volatile unsigned long _res; \
2477  /* _argvec[0] holds current r2 across the call */ \
2478  _argvec[1] = (unsigned long)_orig.r2; \
2479  _argvec[2] = (unsigned long)_orig.nraddr; \
2480  _argvec[2 + 1] = (unsigned long)arg1; \
2481  _argvec[2 + 2] = (unsigned long)arg2; \
2482  _argvec[2 + 3] = (unsigned long)arg3; \
2483  _argvec[2 + 4] = (unsigned long)arg4; \
2484  _argvec[2 + 5] = (unsigned long)arg5; \
2485  _argvec[2 + 6] = (unsigned long)arg6; \
2486  _argvec[2 + 7] = (unsigned long)arg7; \
2487  _argvec[2 + 8] = (unsigned long)arg8; \
2488  _argvec[2 + 9] = (unsigned long)arg9; \
2489  _argvec[2 + 10] = (unsigned long)arg10; \
2490  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2491  "std 2,-16(11)\n\t" /* save tocptr */ \
2492  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2493  "addi 1,1,-128\n\t" /* expand stack frame */ /* arg10 */ \
2494  "ld 3,80(11)\n\t" \
2495  "std 3,120(1)\n\t" /* arg9 */ \
2496  "ld 3,72(11)\n\t" \
2497  "std 3,112(1)\n\t" /* args1-8 */ \
2498  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2499  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2500  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2501  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2502  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2503  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2504  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2505  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2506  "ld 11, 0(11)\n\t" /* target->r11 */ \
2507  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2508  "mr %0,3\n\t" \
2509  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2510  VALGRIND_RESTORE_STACK \
2511  : /*out*/ "=r"( _res ) \
2512  : /*in*/ "r"( &_argvec[2] ) \
2513  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2514  lval = (__typeof__( lval ))_res; \
2515  } while ( 0 )
2516 
/* ppc64-linux: 11-arg variant.  Frame grown by 144 bytes; arg9/10/11 are
   copied to 112(1)/120(1)/128(1), args 1-8 loaded into r3-r10. */
2517 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
2518  do { \
2519  volatile OrigFn _orig = ( orig ); \
2520  volatile unsigned long _argvec[3 + 11]; \
2521  volatile unsigned long _res; \
2522  /* _argvec[0] holds current r2 across the call */ \
2523  _argvec[1] = (unsigned long)_orig.r2; \
2524  _argvec[2] = (unsigned long)_orig.nraddr; \
2525  _argvec[2 + 1] = (unsigned long)arg1; \
2526  _argvec[2 + 2] = (unsigned long)arg2; \
2527  _argvec[2 + 3] = (unsigned long)arg3; \
2528  _argvec[2 + 4] = (unsigned long)arg4; \
2529  _argvec[2 + 5] = (unsigned long)arg5; \
2530  _argvec[2 + 6] = (unsigned long)arg6; \
2531  _argvec[2 + 7] = (unsigned long)arg7; \
2532  _argvec[2 + 8] = (unsigned long)arg8; \
2533  _argvec[2 + 9] = (unsigned long)arg9; \
2534  _argvec[2 + 10] = (unsigned long)arg10; \
2535  _argvec[2 + 11] = (unsigned long)arg11; \
2536  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2537  "std 2,-16(11)\n\t" /* save tocptr */ \
2538  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2539  "addi 1,1,-144\n\t" /* expand stack frame */ /* arg11 */ \
2540  "ld 3,88(11)\n\t" \
2541  "std 3,128(1)\n\t" /* arg10 */ \
2542  "ld 3,80(11)\n\t" \
2543  "std 3,120(1)\n\t" /* arg9 */ \
2544  "ld 3,72(11)\n\t" \
2545  "std 3,112(1)\n\t" /* args1-8 */ \
2546  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2547  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2548  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2549  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2550  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2551  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2552  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2553  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2554  "ld 11, 0(11)\n\t" /* target->r11 */ \
2555  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2556  "mr %0,3\n\t" \
2557  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2558  VALGRIND_RESTORE_STACK \
2559  : /*out*/ "=r"( _res ) \
2560  : /*in*/ "r"( &_argvec[2] ) \
2561  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2562  lval = (__typeof__( lval ))_res; \
2563  } while ( 0 )
2564 
/* ppc64-linux: 12-arg variant.  Frame grown by 144 bytes; arg9..arg12
   are copied to 112(1)..136(1), args 1-8 loaded into r3-r10. */
2565 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
2566  do { \
2567  volatile OrigFn _orig = ( orig ); \
2568  volatile unsigned long _argvec[3 + 12]; \
2569  volatile unsigned long _res; \
2570  /* _argvec[0] holds current r2 across the call */ \
2571  _argvec[1] = (unsigned long)_orig.r2; \
2572  _argvec[2] = (unsigned long)_orig.nraddr; \
2573  _argvec[2 + 1] = (unsigned long)arg1; \
2574  _argvec[2 + 2] = (unsigned long)arg2; \
2575  _argvec[2 + 3] = (unsigned long)arg3; \
2576  _argvec[2 + 4] = (unsigned long)arg4; \
2577  _argvec[2 + 5] = (unsigned long)arg5; \
2578  _argvec[2 + 6] = (unsigned long)arg6; \
2579  _argvec[2 + 7] = (unsigned long)arg7; \
2580  _argvec[2 + 8] = (unsigned long)arg8; \
2581  _argvec[2 + 9] = (unsigned long)arg9; \
2582  _argvec[2 + 10] = (unsigned long)arg10; \
2583  _argvec[2 + 11] = (unsigned long)arg11; \
2584  _argvec[2 + 12] = (unsigned long)arg12; \
2585  __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2586  "std 2,-16(11)\n\t" /* save tocptr */ \
2587  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2588  "addi 1,1,-144\n\t" /* expand stack frame */ /* arg12 */ \
2589  "ld 3,96(11)\n\t" \
2590  "std 3,136(1)\n\t" /* arg11 */ \
2591  "ld 3,88(11)\n\t" \
2592  "std 3,128(1)\n\t" /* arg10 */ \
2593  "ld 3,80(11)\n\t" \
2594  "std 3,120(1)\n\t" /* arg9 */ \
2595  "ld 3,72(11)\n\t" \
2596  "std 3,112(1)\n\t" /* args1-8 */ \
2597  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2598  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2599  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2600  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2601  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2602  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2603  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2604  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2605  "ld 11, 0(11)\n\t" /* target->r11 */ \
2606  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2607  "mr %0,3\n\t" \
2608  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2609  VALGRIND_RESTORE_STACK \
2610  : /*out*/ "=r"( _res ) \
2611  : /*in*/ "r"( &_argvec[2] ) \
2612  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2613  lval = (__typeof__( lval ))_res; \
2614  } while ( 0 )
2615 
2616 # endif /* PLAT_ppc64_linux */
2617 
2618 /* ------------------------- arm-linux ------------------------- */
2619 
2620 # if defined( PLAT_arm_linux )
2621 
2622 /* These regs are trashed by the hidden call. */
/* NOTE(review): r4 is listed because the CALL_FN_ macros below use it as a
   scratch/target register; r14 (lr) is presumably overwritten by the
   branch-and-link -- confirm against the VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4
   definition elsewhere in this file. */
2623 # define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3", "r4", "r14"
2624 
2625 /* Macros to save and align the stack before making a function
2626  call and restore it afterwards as gcc may not keep the stack
2627  pointer aligned if it doesn't realise calls are being made
2628  to other functions. */
2629 
2630 /* This is a bit tricky. We store the original stack pointer in r10
2631  as it is callee-saves. gcc doesn't allow the use of r11 for some
2632  reason. Also, we can't directly "bic" the stack pointer in thumb
2633  mode since r13 isn't an allowed register number in that context.
2634  So use r4 as a temporary, since that is about to get trashed
2635  anyway, just after each use of this macro. Side effect is we need
2636  to be very careful about any future changes, since
2637  VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* Save sp in r10 (callee-saved), then round sp down to an 8-byte boundary
   using r4 as a temporary ("bic #7" clears the low three bits).  RESTORE
   simply reinstates the saved value from r10. */
2638 # define VALGRIND_ALIGN_STACK \
2639  "mov r10, sp\n\t" \
2640  "mov r4, sp\n\t" \
2641  "bic r4, r4, #7\n\t" \
2642  "mov sp, r4\n\t"
2643 # define VALGRIND_RESTORE_STACK "mov sp, r10\n\t"
2644 
2645 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
2646  long) == 4. */
2647 
/* arm-linux: call a no-argument function via the non-redirected entry
   point.  Target address is loaded from _argvec[0] into r4; the result is
   taken from r0.  r10 is clobbered by VALGRIND_ALIGN_STACK (see above). */
2648 # define CALL_FN_W_v( lval, orig ) \
2649  do { \
2650  volatile OrigFn _orig = ( orig ); \
2651  volatile unsigned long _argvec[1]; \
2652  volatile unsigned long _res; \
2653  _argvec[0] = (unsigned long)_orig.nraddr; \
2654  __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r4, [%1] \n\t" /* target->r4 */ \
2655  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \
2656  : /*out*/ "=r"( _res ) \
2657  : /*in*/ "0"( &_argvec[0] ) \
2658  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2659  lval = (__typeof__( lval ))_res; \
2660  } while ( 0 )
2661 
/* arm-linux: 1-arg call; arg1 is loaded from _argvec[1] into r0, target
   from _argvec[0] into r4; result returned in r0. */
2662 # define CALL_FN_W_W( lval, orig, arg1 ) \
2663  do { \
2664  volatile OrigFn _orig = ( orig ); \
2665  volatile unsigned long _argvec[2]; \
2666  volatile unsigned long _res; \
2667  _argvec[0] = (unsigned long)_orig.nraddr; \
2668  _argvec[1] = (unsigned long)( arg1 ); \
2669  __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \
2670  "ldr r4, [%1] \n\t" /* target->r4 */ \
2671  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \
2672  : /*out*/ "=r"( _res ) \
2673  : /*in*/ "0"( &_argvec[0] ) \
2674  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2675  lval = (__typeof__( lval ))_res; \
2676  } while ( 0 )
2677 
/* arm-linux: 2-arg call; args loaded into r0-r1, target into r4. */
2678 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
2679  do { \
2680  volatile OrigFn _orig = ( orig ); \
2681  volatile unsigned long _argvec[3]; \
2682  volatile unsigned long _res; \
2683  _argvec[0] = (unsigned long)_orig.nraddr; \
2684  _argvec[1] = (unsigned long)( arg1 ); \
2685  _argvec[2] = (unsigned long)( arg2 ); \
2686  __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \
2687  "ldr r1, [%1, #8] \n\t" \
2688  "ldr r4, [%1] \n\t" /* target->r4 */ \
2689  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \
2690  : /*out*/ "=r"( _res ) \
2691  : /*in*/ "0"( &_argvec[0] ) \
2692  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2693  lval = (__typeof__( lval ))_res; \
2694  } while ( 0 )
2695 
/* arm-linux: 3-arg call; args loaded into r0-r2, target into r4. */
2696 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
2697  do { \
2698  volatile OrigFn _orig = ( orig ); \
2699  volatile unsigned long _argvec[4]; \
2700  volatile unsigned long _res; \
2701  _argvec[0] = (unsigned long)_orig.nraddr; \
2702  _argvec[1] = (unsigned long)( arg1 ); \
2703  _argvec[2] = (unsigned long)( arg2 ); \
2704  _argvec[3] = (unsigned long)( arg3 ); \
2705  __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \
2706  "ldr r1, [%1, #8] \n\t" \
2707  "ldr r2, [%1, #12] \n\t" \
2708  "ldr r4, [%1] \n\t" /* target->r4 */ \
2709  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \
2710  : /*out*/ "=r"( _res ) \
2711  : /*in*/ "0"( &_argvec[0] ) \
2712  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2713  lval = (__typeof__( lval ))_res; \
2714  } while ( 0 )
2715 
/* arm-linux: 4-arg call; args exactly fill the register slots r0-r3,
   target goes in r4. */
2716 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
2717  do { \
2718  volatile OrigFn _orig = ( orig ); \
2719  volatile unsigned long _argvec[5]; \
2720  volatile unsigned long _res; \
2721  _argvec[0] = (unsigned long)_orig.nraddr; \
2722  _argvec[1] = (unsigned long)( arg1 ); \
2723  _argvec[2] = (unsigned long)( arg2 ); \
2724  _argvec[3] = (unsigned long)( arg3 ); \
2725  _argvec[4] = (unsigned long)( arg4 ); \
2726  __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \
2727  "ldr r1, [%1, #8] \n\t" \
2728  "ldr r2, [%1, #12] \n\t" \
2729  "ldr r3, [%1, #16] \n\t" \
2730  "ldr r4, [%1] \n\t" /* target->r4 */ \
2731  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2732  : /*out*/ "=r"( _res ) \
2733  : /*in*/ "0"( &_argvec[0] ) \
2734  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2735  lval = (__typeof__( lval ))_res; \
2736  } while ( 0 )
2737 
/* arm-linux: 5-arg call; args 1-4 in r0-r3, arg5 pushed on the stack.
   The extra "sub sp, #4" pads the single pushed word so the total stack
   adjustment stays a multiple of 8. */
2738 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
2739  do { \
2740  volatile OrigFn _orig = ( orig ); \
2741  volatile unsigned long _argvec[6]; \
2742  volatile unsigned long _res; \
2743  _argvec[0] = (unsigned long)_orig.nraddr; \
2744  _argvec[1] = (unsigned long)( arg1 ); \
2745  _argvec[2] = (unsigned long)( arg2 ); \
2746  _argvec[3] = (unsigned long)( arg3 ); \
2747  _argvec[4] = (unsigned long)( arg4 ); \
2748  _argvec[5] = (unsigned long)( arg5 ); \
2749  __asm__ volatile( VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \
2750  "ldr r0, [%1, #20] \n\t" \
2751  "push {r0} \n\t" \
2752  "ldr r0, [%1, #4] \n\t" \
2753  "ldr r1, [%1, #8] \n\t" \
2754  "ldr r2, [%1, #12] \n\t" \
2755  "ldr r3, [%1, #16] \n\t" \
2756  "ldr r4, [%1] \n\t" /* target->r4 */ \
2757  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2758  : /*out*/ "=r"( _res ) \
2759  : /*in*/ "0"( &_argvec[0] ) \
2760  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2761  lval = (__typeof__( lval ))_res; \
2762  } while ( 0 )
2763 
/* arm-linux: 6-arg call; args 1-4 in r0-r3, args 5-6 pushed as a pair
   (8 bytes, so no alignment padding is needed). */
2764 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
2765  do { \
2766  volatile OrigFn _orig = ( orig ); \
2767  volatile unsigned long _argvec[7]; \
2768  volatile unsigned long _res; \
2769  _argvec[0] = (unsigned long)_orig.nraddr; \
2770  _argvec[1] = (unsigned long)( arg1 ); \
2771  _argvec[2] = (unsigned long)( arg2 ); \
2772  _argvec[3] = (unsigned long)( arg3 ); \
2773  _argvec[4] = (unsigned long)( arg4 ); \
2774  _argvec[5] = (unsigned long)( arg5 ); \
2775  _argvec[6] = (unsigned long)( arg6 ); \
2776  __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #20] \n\t" \
2777  "ldr r1, [%1, #24] \n\t" \
2778  "push {r0, r1} \n\t" \
2779  "ldr r0, [%1, #4] \n\t" \
2780  "ldr r1, [%1, #8] \n\t" \
2781  "ldr r2, [%1, #12] \n\t" \
2782  "ldr r3, [%1, #16] \n\t" \
2783  "ldr r4, [%1] \n\t" /* target->r4 */ \
2784  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2785  : /*out*/ "=r"( _res ) \
2786  : /*in*/ "0"( &_argvec[0] ) \
2787  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2788  lval = (__typeof__( lval ))_res; \
2789  } while ( 0 )
2790 
/* arm-linux: 7-arg call; args 1-4 in r0-r3, args 5-7 pushed (3 words
   plus a 4-byte pad from "sub sp, #4" keeps the adjustment 8-aligned). */
2791 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
2792  do { \
2793  volatile OrigFn _orig = ( orig ); \
2794  volatile unsigned long _argvec[8]; \
2795  volatile unsigned long _res; \
2796  _argvec[0] = (unsigned long)_orig.nraddr; \
2797  _argvec[1] = (unsigned long)( arg1 ); \
2798  _argvec[2] = (unsigned long)( arg2 ); \
2799  _argvec[3] = (unsigned long)( arg3 ); \
2800  _argvec[4] = (unsigned long)( arg4 ); \
2801  _argvec[5] = (unsigned long)( arg5 ); \
2802  _argvec[6] = (unsigned long)( arg6 ); \
2803  _argvec[7] = (unsigned long)( arg7 ); \
2804  __asm__ volatile( VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \
2805  "ldr r0, [%1, #20] \n\t" \
2806  "ldr r1, [%1, #24] \n\t" \
2807  "ldr r2, [%1, #28] \n\t" \
2808  "push {r0, r1, r2} \n\t" \
2809  "ldr r0, [%1, #4] \n\t" \
2810  "ldr r1, [%1, #8] \n\t" \
2811  "ldr r2, [%1, #12] \n\t" \
2812  "ldr r3, [%1, #16] \n\t" \
2813  "ldr r4, [%1] \n\t" /* target->r4 */ \
2814  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2815  : /*out*/ "=r"( _res ) \
2816  : /*in*/ "0"( &_argvec[0] ) \
2817  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2818  lval = (__typeof__( lval ))_res; \
2819  } while ( 0 )
2820 
/* arm-linux: 8-arg call; args 1-4 in r0-r3, args 5-8 pushed as four
   words (16 bytes, naturally 8-aligned). */
2821 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
2822  do { \
2823  volatile OrigFn _orig = ( orig ); \
2824  volatile unsigned long _argvec[9]; \
2825  volatile unsigned long _res; \
2826  _argvec[0] = (unsigned long)_orig.nraddr; \
2827  _argvec[1] = (unsigned long)( arg1 ); \
2828  _argvec[2] = (unsigned long)( arg2 ); \
2829  _argvec[3] = (unsigned long)( arg3 ); \
2830  _argvec[4] = (unsigned long)( arg4 ); \
2831  _argvec[5] = (unsigned long)( arg5 ); \
2832  _argvec[6] = (unsigned long)( arg6 ); \
2833  _argvec[7] = (unsigned long)( arg7 ); \
2834  _argvec[8] = (unsigned long)( arg8 ); \
2835  __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #20] \n\t" \
2836  "ldr r1, [%1, #24] \n\t" \
2837  "ldr r2, [%1, #28] \n\t" \
2838  "ldr r3, [%1, #32] \n\t" \
2839  "push {r0, r1, r2, r3} \n\t" \
2840  "ldr r0, [%1, #4] \n\t" \
2841  "ldr r1, [%1, #8] \n\t" \
2842  "ldr r2, [%1, #12] \n\t" \
2843  "ldr r3, [%1, #16] \n\t" \
2844  "ldr r4, [%1] \n\t" /* target->r4 */ \
2845  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2846  : /*out*/ "=r"( _res ) \
2847  : /*in*/ "0"( &_argvec[0] ) \
2848  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2849  lval = (__typeof__( lval ))_res; \
2850  } while ( 0 )
2851 
/* arm-linux: 9-arg call; args 5-9 pushed as five words plus a 4-byte pad
   ("sub sp, #4") to keep the total adjustment a multiple of 8; args 1-4
   then loaded into r0-r3 and target into r4. */
2852 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
2853  do { \
2854  volatile OrigFn _orig = ( orig ); \
2855  volatile unsigned long _argvec[10]; \
2856  volatile unsigned long _res; \
2857  _argvec[0] = (unsigned long)_orig.nraddr; \
2858  _argvec[1] = (unsigned long)( arg1 ); \
2859  _argvec[2] = (unsigned long)( arg2 ); \
2860  _argvec[3] = (unsigned long)( arg3 ); \
2861  _argvec[4] = (unsigned long)( arg4 ); \
2862  _argvec[5] = (unsigned long)( arg5 ); \
2863  _argvec[6] = (unsigned long)( arg6 ); \
2864  _argvec[7] = (unsigned long)( arg7 ); \
2865  _argvec[8] = (unsigned long)( arg8 ); \
2866  _argvec[9] = (unsigned long)( arg9 ); \
2867  __asm__ volatile( VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \
2868  "ldr r0, [%1, #20] \n\t" \
2869  "ldr r1, [%1, #24] \n\t" \
2870  "ldr r2, [%1, #28] \n\t" \
2871  "ldr r3, [%1, #32] \n\t" \
2872  "ldr r4, [%1, #36] \n\t" \
2873  "push {r0, r1, r2, r3, r4} \n\t" \
2874  "ldr r0, [%1, #4] \n\t" \
2875  "ldr r1, [%1, #8] \n\t" \
2876  "ldr r2, [%1, #12] \n\t" \
2877  "ldr r3, [%1, #16] \n\t" \
2878  "ldr r4, [%1] \n\t" /* target->r4 */ \
2879  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2880  : /*out*/ "=r"( _res ) \
2881  : /*in*/ "0"( &_argvec[0] ) \
2882  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2883  lval = (__typeof__( lval ))_res; \
2884  } while ( 0 )
2885 
/* arm-linux: 10-arg call; arg10 pushed first, then args 5-9 as a block
   (six words total, 24 bytes, 8-aligned); args 1-4 in r0-r3. */
2886 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
2887  do { \
2888  volatile OrigFn _orig = ( orig ); \
2889  volatile unsigned long _argvec[11]; \
2890  volatile unsigned long _res; \
2891  _argvec[0] = (unsigned long)_orig.nraddr; \
2892  _argvec[1] = (unsigned long)( arg1 ); \
2893  _argvec[2] = (unsigned long)( arg2 ); \
2894  _argvec[3] = (unsigned long)( arg3 ); \
2895  _argvec[4] = (unsigned long)( arg4 ); \
2896  _argvec[5] = (unsigned long)( arg5 ); \
2897  _argvec[6] = (unsigned long)( arg6 ); \
2898  _argvec[7] = (unsigned long)( arg7 ); \
2899  _argvec[8] = (unsigned long)( arg8 ); \
2900  _argvec[9] = (unsigned long)( arg9 ); \
2901  _argvec[10] = (unsigned long)( arg10 ); \
2902  __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #40] \n\t" \
2903  "push {r0} \n\t" \
2904  "ldr r0, [%1, #20] \n\t" \
2905  "ldr r1, [%1, #24] \n\t" \
2906  "ldr r2, [%1, #28] \n\t" \
2907  "ldr r3, [%1, #32] \n\t" \
2908  "ldr r4, [%1, #36] \n\t" \
2909  "push {r0, r1, r2, r3, r4} \n\t" \
2910  "ldr r0, [%1, #4] \n\t" \
2911  "ldr r1, [%1, #8] \n\t" \
2912  "ldr r2, [%1, #12] \n\t" \
2913  "ldr r3, [%1, #16] \n\t" \
2914  "ldr r4, [%1] \n\t" /* target->r4 */ \
2915  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2916  : /*out*/ "=r"( _res ) \
2917  : /*in*/ "0"( &_argvec[0] ) \
2918  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2919  lval = (__typeof__( lval ))_res; \
2920  } while ( 0 )
2921 
/* arm-linux: 11-arg call; args 10-11 pushed, then args 5-9, with a
   4-byte pad ("sub sp, #4") so the seven pushed words stay 8-aligned;
   args 1-4 in r0-r3. */
2922 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
2923  do { \
2924  volatile OrigFn _orig = ( orig ); \
2925  volatile unsigned long _argvec[12]; \
2926  volatile unsigned long _res; \
2927  _argvec[0] = (unsigned long)_orig.nraddr; \
2928  _argvec[1] = (unsigned long)( arg1 ); \
2929  _argvec[2] = (unsigned long)( arg2 ); \
2930  _argvec[3] = (unsigned long)( arg3 ); \
2931  _argvec[4] = (unsigned long)( arg4 ); \
2932  _argvec[5] = (unsigned long)( arg5 ); \
2933  _argvec[6] = (unsigned long)( arg6 ); \
2934  _argvec[7] = (unsigned long)( arg7 ); \
2935  _argvec[8] = (unsigned long)( arg8 ); \
2936  _argvec[9] = (unsigned long)( arg9 ); \
2937  _argvec[10] = (unsigned long)( arg10 ); \
2938  _argvec[11] = (unsigned long)( arg11 ); \
2939  __asm__ volatile( VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \
2940  "ldr r0, [%1, #40] \n\t" \
2941  "ldr r1, [%1, #44] \n\t" \
2942  "push {r0, r1} \n\t" \
2943  "ldr r0, [%1, #20] \n\t" \
2944  "ldr r1, [%1, #24] \n\t" \
2945  "ldr r2, [%1, #28] \n\t" \
2946  "ldr r3, [%1, #32] \n\t" \
2947  "ldr r4, [%1, #36] \n\t" \
2948  "push {r0, r1, r2, r3, r4} \n\t" \
2949  "ldr r0, [%1, #4] \n\t" \
2950  "ldr r1, [%1, #8] \n\t" \
2951  "ldr r2, [%1, #12] \n\t" \
2952  "ldr r3, [%1, #16] \n\t" \
2953  "ldr r4, [%1] \n\t" /* target->r4 */ \
2954  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2955  : /*out*/ "=r"( _res ) \
2956  : /*in*/ "0"( &_argvec[0] ) \
2957  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2958  lval = (__typeof__( lval ))_res; \
2959  } while ( 0 )
2960 
/* arm-linux: 12-arg call; args 10-12 pushed, then args 5-9 (eight words
   total, 32 bytes, 8-aligned); args 1-4 in r0-r3, target in r4. */
2961 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
2962  do { \
2963  volatile OrigFn _orig = ( orig ); \
2964  volatile unsigned long _argvec[13]; \
2965  volatile unsigned long _res; \
2966  _argvec[0] = (unsigned long)_orig.nraddr; \
2967  _argvec[1] = (unsigned long)( arg1 ); \
2968  _argvec[2] = (unsigned long)( arg2 ); \
2969  _argvec[3] = (unsigned long)( arg3 ); \
2970  _argvec[4] = (unsigned long)( arg4 ); \
2971  _argvec[5] = (unsigned long)( arg5 ); \
2972  _argvec[6] = (unsigned long)( arg6 ); \
2973  _argvec[7] = (unsigned long)( arg7 ); \
2974  _argvec[8] = (unsigned long)( arg8 ); \
2975  _argvec[9] = (unsigned long)( arg9 ); \
2976  _argvec[10] = (unsigned long)( arg10 ); \
2977  _argvec[11] = (unsigned long)( arg11 ); \
2978  _argvec[12] = (unsigned long)( arg12 ); \
2979  __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #40] \n\t" \
2980  "ldr r1, [%1, #44] \n\t" \
2981  "ldr r2, [%1, #48] \n\t" \
2982  "push {r0, r1, r2} \n\t" \
2983  "ldr r0, [%1, #20] \n\t" \
2984  "ldr r1, [%1, #24] \n\t" \
2985  "ldr r2, [%1, #28] \n\t" \
2986  "ldr r3, [%1, #32] \n\t" \
2987  "ldr r4, [%1, #36] \n\t" \
2988  "push {r0, r1, r2, r3, r4} \n\t" \
2989  "ldr r0, [%1, #4] \n\t" \
2990  "ldr r1, [%1, #8] \n\t" \
2991  "ldr r2, [%1, #12] \n\t" \
2992  "ldr r3, [%1, #16] \n\t" \
2993  "ldr r4, [%1] \n\t" /* target->r4 */ \
2994  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2995  : /*out*/ "=r"( _res ) \
2996  : /*in*/ "0"( &_argvec[0] ) \
2997  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2998  lval = (__typeof__( lval ))_res; \
2999  } while ( 0 )
3000 
3001 # endif /* PLAT_arm_linux */
3002 
3003 /* ------------------------- s390x-linux ------------------------- */
3004 
3005 # if defined( PLAT_s390x_linux )
3006 
3007 /* Similar workaround as amd64 (see above), but we use r11 as frame
3008  pointer and save the old r11 in r7. r11 might be used for
3009  argvec, therefore we copy argvec in r1 since r1 is clobbered
3010  after the call anyway. */
/* With DWARF CFI support, the prologue copies the argvec pointer into r1,
   saves r11 in r7 and makes r11 a copy of the CFA so unwinding still works
   inside the asm; the epilogue undoes this.  Without CFI support only the
   r1 copy is performed. */
3011 # if defined( __GNUC__ ) && defined( __GCC_HAVE_DWARF2_CFI_ASM )
3012 # define __FRAME_POINTER , "d"( __builtin_dwarf_cfa() )
3013 # define VALGRIND_CFI_PROLOGUE \
3014  ".cfi_remember_state\n\t" \
3015  "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
3016  "lgr 7,11\n\t" \
3017  "lgr 11,%2\n\t" \
3018  ".cfi_def_cfa r11, 0\n\t"
3019 # define VALGRIND_CFI_EPILOGUE \
3020  "lgr 11, 7\n\t" \
3021  ".cfi_restore_state\n\t"
3022 # else
3023 # define __FRAME_POINTER
3024 # define VALGRIND_CFI_PROLOGUE "lgr 1,%1\n\t"
3025 # define VALGRIND_CFI_EPILOGUE
3026 # endif
3027 
3028 /* Nb: On s390 the stack pointer is properly aligned *at all times*
3029  according to the s390 GCC maintainer. (The ABI specification is not
3030  precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
3031  VALGRIND_RESTORE_STACK are not defined here. */
3032 
3033 /* These regs are trashed by the hidden call. Note that we overwrite
3034  r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
3035  function a proper return address. All others are ABI defined call
3036  clobbers. */
3037 # define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
3038 
3039 /* Nb: Although r11 is modified in the asm snippets below (inside
3040  VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
3041  two reasons:
3042  (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
3043  modified
3044  (2) GCC will complain that r11 cannot appear inside a clobber section,
3045  when compiled with -O -fno-omit-frame-pointer
3046  */
3047 
/* s390x-linux: no-arg call.  The prologue copies the argvec pointer into
   r1; 160 bytes are reserved below sp (r15) for the callee and released
   afterwards; the target is loaded into r1 and the result taken from r2.
   r7 is clobbered because the CFI prologue saves r11 in it. */
3048 # define CALL_FN_W_v( lval, orig ) \
3049  do { \
3050  volatile OrigFn _orig = ( orig ); \
3051  volatile unsigned long _argvec[1]; \
3052  volatile unsigned long _res; \
3053  _argvec[0] = (unsigned long)_orig.nraddr; \
3054  __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3055  "lg 1, 0(1)\n\t" /* target->r1 */ \
3056  VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3057  "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3058  : /*out*/ "=d"( _res ) \
3059  : /*in*/ "d"(&_argvec[0])__FRAME_POINTER \
3060  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "7" ); \
3061  lval = (__typeof__( lval ))_res; \
3062  } while ( 0 )
3063 
3064 /* The call abi has the arguments in r2-r6 and stack */
/* s390x-linux: 1-arg call; arg1 loaded from 8(1) into r2, target from
   0(1) into r1; result returned in r2. */
3065 # define CALL_FN_W_W( lval, orig, arg1 ) \
3066  do { \
3067  volatile OrigFn _orig = ( orig ); \
3068  volatile unsigned long _argvec[2]; \
3069  volatile unsigned long _res; \
3070  _argvec[0] = (unsigned long)_orig.nraddr; \
3071  _argvec[1] = (unsigned long)arg1; \
3072  __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3073  "lg 2, 8(1)\n\t" \
3074  "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3075  "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3076  : /*out*/ "=d"( _res ) \
3077  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
3078  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "7" ); \
3079  lval = (__typeof__( lval ))_res; \
3080  } while ( 0 )
3081 
/* s390x-linux: 2-arg call; args loaded into r2-r3, target into r1. */
3082 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
3083  do { \
3084  volatile OrigFn _orig = ( orig ); \
3085  volatile unsigned long _argvec[3]; \
3086  volatile unsigned long _res; \
3087  _argvec[0] = (unsigned long)_orig.nraddr; \
3088  _argvec[1] = (unsigned long)arg1; \
3089  _argvec[2] = (unsigned long)arg2; \
3090  __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3091  "lg 2, 8(1)\n\t" \
3092  "lg 3,16(1)\n\t" \
3093  "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3094  "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3095  : /*out*/ "=d"( _res ) \
3096  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
3097  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "7" ); \
3098  lval = (__typeof__( lval ))_res; \
3099  } while ( 0 )
3100 
/* s390x-linux: 3-arg call; args loaded into r2-r4, target into r1. */
3101 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
3102  do { \
3103  volatile OrigFn _orig = ( orig ); \
3104  volatile unsigned long _argvec[4]; \
3105  volatile unsigned long _res; \
3106  _argvec[0] = (unsigned long)_orig.nraddr; \
3107  _argvec[1] = (unsigned long)arg1; \
3108  _argvec[2] = (unsigned long)arg2; \
3109  _argvec[3] = (unsigned long)arg3; \
3110  __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3111  "lg 2, 8(1)\n\t" \
3112  "lg 3,16(1)\n\t" \
3113  "lg 4,24(1)\n\t" \
3114  "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3115  "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3116  : /*out*/ "=d"( _res ) \
3117  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
3118  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "7" ); \
3119  lval = (__typeof__( lval ))_res; \
3120  } while ( 0 )
3121 
3122 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
3123  do { \
3124  volatile OrigFn _orig = ( orig ); \
3125  volatile unsigned long _argvec[5]; \
3126  volatile unsigned long _res; \
3127  _argvec[0] = (unsigned long)_orig.nraddr; \
3128  _argvec[1] = (unsigned long)arg1; \
3129  _argvec[2] = (unsigned long)arg2; \
3130  _argvec[3] = (unsigned long)arg3; \
3131  _argvec[4] = (unsigned long)arg4; \
3132  __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3133  "lg 2, 8(1)\n\t" \
3134  "lg 3,16(1)\n\t" \
3135  "lg 4,24(1)\n\t" \
3136  "lg 5,32(1)\n\t" \
3137  "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3138  "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3139  : /*out*/ "=d"( _res ) \
3140  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
3141  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "7" ); \
3142  lval = (__typeof__( lval ))_res; \
3143  } while ( 0 )
3144 
3145 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
3146  do { \
3147  volatile OrigFn _orig = ( orig ); \
3148  volatile unsigned long _argvec[6]; \
3149  volatile unsigned long _res; \
3150  _argvec[0] = (unsigned long)_orig.nraddr; \
3151  _argvec[1] = (unsigned long)arg1; \
3152  _argvec[2] = (unsigned long)arg2; \
3153  _argvec[3] = (unsigned long)arg3; \
3154  _argvec[4] = (unsigned long)arg4; \
3155  _argvec[5] = (unsigned long)arg5; \
3156  __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3157  "lg 2, 8(1)\n\t" \
3158  "lg 3,16(1)\n\t" \
3159  "lg 4,24(1)\n\t" \
3160  "lg 5,32(1)\n\t" \
3161  "lg 6,40(1)\n\t" \
3162  "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3163  "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3164  : /*out*/ "=d"( _res ) \
3165  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
3166  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3167  lval = (__typeof__( lval ))_res; \
3168  } while ( 0 )
3169 
3170 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
3171  do { \
3172  volatile OrigFn _orig = ( orig ); \
3173  volatile unsigned long _argvec[7]; \
3174  volatile unsigned long _res; \
3175  _argvec[0] = (unsigned long)_orig.nraddr; \
3176  _argvec[1] = (unsigned long)arg1; \
3177  _argvec[2] = (unsigned long)arg2; \
3178  _argvec[3] = (unsigned long)arg3; \
3179  _argvec[4] = (unsigned long)arg4; \
3180  _argvec[5] = (unsigned long)arg5; \
3181  _argvec[6] = (unsigned long)arg6; \
3182  __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-168\n\t" \
3183  "lg 2, 8(1)\n\t" \
3184  "lg 3,16(1)\n\t" \
3185  "lg 4,24(1)\n\t" \
3186  "lg 5,32(1)\n\t" \
3187  "lg 6,40(1)\n\t" \
3188  "mvc 160(8,15), 48(1)\n\t" \
3189  "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3190  "aghi 15,168\n\t" VALGRIND_CFI_EPILOGUE \
3191  : /*out*/ "=d"( _res ) \
3192  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
3193  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3194  lval = (__typeof__( lval ))_res; \
3195  } while ( 0 )
3196 
3197 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
3198  do { \
3199  volatile OrigFn _orig = ( orig ); \
3200  volatile unsigned long _argvec[8]; \
3201  volatile unsigned long _res; \
3202  _argvec[0] = (unsigned long)_orig.nraddr; \
3203  _argvec[1] = (unsigned long)arg1; \
3204  _argvec[2] = (unsigned long)arg2; \
3205  _argvec[3] = (unsigned long)arg3; \
3206  _argvec[4] = (unsigned long)arg4; \
3207  _argvec[5] = (unsigned long)arg5; \
3208  _argvec[6] = (unsigned long)arg6; \
3209  _argvec[7] = (unsigned long)arg7; \
3210  __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-176\n\t" \
3211  "lg 2, 8(1)\n\t" \
3212  "lg 3,16(1)\n\t" \
3213  "lg 4,24(1)\n\t" \
3214  "lg 5,32(1)\n\t" \
3215  "lg 6,40(1)\n\t" \
3216  "mvc 160(8,15), 48(1)\n\t" \
3217  "mvc 168(8,15), 56(1)\n\t" \
3218  "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3219  "aghi 15,176\n\t" VALGRIND_CFI_EPILOGUE \
3220  : /*out*/ "=d"( _res ) \
3221  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
3222  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3223  lval = (__typeof__( lval ))_res; \
3224  } while ( 0 )
3225 
3226 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
3227  do { \
3228  volatile OrigFn _orig = ( orig ); \
3229  volatile unsigned long _argvec[9]; \
3230  volatile unsigned long _res; \
3231  _argvec[0] = (unsigned long)_orig.nraddr; \
3232  _argvec[1] = (unsigned long)arg1; \
3233  _argvec[2] = (unsigned long)arg2; \
3234  _argvec[3] = (unsigned long)arg3; \
3235  _argvec[4] = (unsigned long)arg4; \
3236  _argvec[5] = (unsigned long)arg5; \
3237  _argvec[6] = (unsigned long)arg6; \
3238  _argvec[7] = (unsigned long)arg7; \
3239  _argvec[8] = (unsigned long)arg8; \
3240  __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-184\n\t" \
3241  "lg 2, 8(1)\n\t" \
3242  "lg 3,16(1)\n\t" \
3243  "lg 4,24(1)\n\t" \
3244  "lg 5,32(1)\n\t" \
3245  "lg 6,40(1)\n\t" \
3246  "mvc 160(8,15), 48(1)\n\t" \
3247  "mvc 168(8,15), 56(1)\n\t" \
3248  "mvc 176(8,15), 64(1)\n\t" \
3249  "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3250  "aghi 15,184\n\t" VALGRIND_CFI_EPILOGUE \
3251  : /*out*/ "=d"( _res ) \
3252  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
3253  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3254  lval = (__typeof__( lval ))_res; \
3255  } while ( 0 )
3256 
3257 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
3258  do { \
3259  volatile OrigFn _orig = ( orig ); \
3260  volatile unsigned long _argvec[10]; \
3261  volatile unsigned long _res; \
3262  _argvec[0] = (unsigned long)_orig.nraddr; \
3263  _argvec[1] = (unsigned long)arg1; \
3264  _argvec[2] = (unsigned long)arg2; \
3265  _argvec[3] = (unsigned long)arg3; \
3266  _argvec[4] = (unsigned long)arg4; \
3267  _argvec[5] = (unsigned long)arg5; \
3268  _argvec[6] = (unsigned long)arg6; \
3269  _argvec[7] = (unsigned long)arg7; \
3270  _argvec[8] = (unsigned long)arg8; \
3271  _argvec[9] = (unsigned long)arg9; \
3272  __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-192\n\t" \
3273  "lg 2, 8(1)\n\t" \
3274  "lg 3,16(1)\n\t" \
3275  "lg 4,24(1)\n\t" \
3276  "lg 5,32(1)\n\t" \
3277  "lg 6,40(1)\n\t" \
3278  "mvc 160(8,15), 48(1)\n\t" \
3279  "mvc 168(8,15), 56(1)\n\t" \
3280  "mvc 176(8,15), 64(1)\n\t" \
3281  "mvc 184(8,15), 72(1)\n\t" \
3282  "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3283  "aghi 15,192\n\t" VALGRIND_CFI_EPILOGUE \
3284  : /*out*/ "=d"( _res ) \
3285  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
3286  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3287  lval = (__typeof__( lval ))_res; \
3288  } while ( 0 )
3289 
3290 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
3291  do { \
3292  volatile OrigFn _orig = ( orig ); \
3293  volatile unsigned long _argvec[11]; \
3294  volatile unsigned long _res; \
3295  _argvec[0] = (unsigned long)_orig.nraddr; \
3296  _argvec[1] = (unsigned long)arg1; \
3297  _argvec[2] = (unsigned long)arg2; \
3298  _argvec[3] = (unsigned long)arg3; \
3299  _argvec[4] = (unsigned long)arg4; \
3300  _argvec[5] = (unsigned long)arg5; \
3301  _argvec[6] = (unsigned long)arg6; \
3302  _argvec[7] = (unsigned long)arg7; \
3303  _argvec[8] = (unsigned long)arg8; \
3304  _argvec[9] = (unsigned long)arg9; \
3305  _argvec[10] = (unsigned long)arg10; \
3306  __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-200\n\t" \
3307  "lg 2, 8(1)\n\t" \
3308  "lg 3,16(1)\n\t" \
3309  "lg 4,24(1)\n\t" \
3310  "lg 5,32(1)\n\t" \
3311  "lg 6,40(1)\n\t" \
3312  "mvc 160(8,15), 48(1)\n\t" \
3313  "mvc 168(8,15), 56(1)\n\t" \
3314  "mvc 176(8,15), 64(1)\n\t" \
3315  "mvc 184(8,15), 72(1)\n\t" \
3316  "mvc 192(8,15), 80(1)\n\t" \
3317  "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3318  "aghi 15,200\n\t" VALGRIND_CFI_EPILOGUE \
3319  : /*out*/ "=d"( _res ) \
3320  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
3321  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3322  lval = (__typeof__( lval ))_res; \
3323  } while ( 0 )
3324 
3325 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
3326  do { \
3327  volatile OrigFn _orig = ( orig ); \
3328  volatile unsigned long _argvec[12]; \
3329  volatile unsigned long _res; \
3330  _argvec[0] = (unsigned long)_orig.nraddr; \
3331  _argvec[1] = (unsigned long)arg1; \
3332  _argvec[2] = (unsigned long)arg2; \
3333  _argvec[3] = (unsigned long)arg3; \
3334  _argvec[4] = (unsigned long)arg4; \
3335  _argvec[5] = (unsigned long)arg5; \
3336  _argvec[6] = (unsigned long)arg6; \
3337  _argvec[7] = (unsigned long)arg7; \
3338  _argvec[8] = (unsigned long)arg8; \
3339  _argvec[9] = (unsigned long)arg9; \
3340  _argvec[10] = (unsigned long)arg10; \
3341  _argvec[11] = (unsigned long)arg11; \
3342  __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-208\n\t" \
3343  "lg 2, 8(1)\n\t" \
3344  "lg 3,16(1)\n\t" \
3345  "lg 4,24(1)\n\t" \
3346  "lg 5,32(1)\n\t" \
3347  "lg 6,40(1)\n\t" \
3348  "mvc 160(8,15), 48(1)\n\t" \
3349  "mvc 168(8,15), 56(1)\n\t" \
3350  "mvc 176(8,15), 64(1)\n\t" \
3351  "mvc 184(8,15), 72(1)\n\t" \
3352  "mvc 192(8,15), 80(1)\n\t" \
3353  "mvc 200(8,15), 88(1)\n\t" \
3354  "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3355  "aghi 15,208\n\t" VALGRIND_CFI_EPILOGUE \
3356  : /*out*/ "=d"( _res ) \
3357  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
3358  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3359  lval = (__typeof__( lval ))_res; \
3360  } while ( 0 )
3361 
3362 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
3363  do { \
3364  volatile OrigFn _orig = ( orig ); \
3365  volatile unsigned long _argvec[13]; \
3366  volatile unsigned long _res; \
3367  _argvec[0] = (unsigned long)_orig.nraddr; \
3368  _argvec[1] = (unsigned long)arg1; \
3369  _argvec[2] = (unsigned long)arg2; \
3370  _argvec[3] = (unsigned long)arg3; \
3371  _argvec[4] = (unsigned long)arg4; \
3372  _argvec[5] = (unsigned long)arg5; \
3373  _argvec[6] = (unsigned long)arg6; \
3374  _argvec[7] = (unsigned long)arg7; \
3375  _argvec[8] = (unsigned long)arg8; \
3376  _argvec[9] = (unsigned long)arg9; \
3377  _argvec[10] = (unsigned long)arg10; \
3378  _argvec[11] = (unsigned long)arg11; \
3379  _argvec[12] = (unsigned long)arg12; \
3380  __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-216\n\t" \
3381  "lg 2, 8(1)\n\t" \
3382  "lg 3,16(1)\n\t" \
3383  "lg 4,24(1)\n\t" \
3384  "lg 5,32(1)\n\t" \
3385  "lg 6,40(1)\n\t" \
3386  "mvc 160(8,15), 48(1)\n\t" \
3387  "mvc 168(8,15), 56(1)\n\t" \
3388  "mvc 176(8,15), 64(1)\n\t" \
3389  "mvc 184(8,15), 72(1)\n\t" \
3390  "mvc 192(8,15), 80(1)\n\t" \
3391  "mvc 200(8,15), 88(1)\n\t" \
3392  "mvc 208(8,15), 96(1)\n\t" \
3393  "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3394  "aghi 15,216\n\t" VALGRIND_CFI_EPILOGUE \
3395  : /*out*/ "=d"( _res ) \
3396  : /*in*/ "a"(&_argvec[0])__FRAME_POINTER \
3397  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3398  lval = (__typeof__( lval ))_res; \
3399  } while ( 0 )
3400 
3401 # endif /* PLAT_s390x_linux */
3402 
3403 /* ------------------------- mips32-linux ----------------------- */
3404 
3405 # if defined( PLAT_mips32_linux )
3406 
/* ---- mips32-linux CALL_FN_* pattern (applies to every macro below) ----
   _argvec[0] holds the non-redirected target address, _argvec[1..n] the
   word-sized arguments.  Each macro:
     - pushes $28 (gp) and $31 (ra) onto an 8-byte scratch area,
     - drops the stack by a further 16+ bytes (the first 16 bytes are the
       ABI-required argument save area; extra slots from 16($29) upward
       receive arguments 5..n, staged one at a time through $4),
     - loads arguments 1-4 into $4-$7, loads the target into $25 (t9)
       and performs the no-redirect call,
     - restores $28/$31, pops the stack, and copies the result from $2
       into _res.
   NOTE(review): most variants tie the input to output 0 via the "0"
   constraint, while CALL_FN_W_12W uses a plain "r" -- this matches
   upstream valgrind.h; verify there before changing. */
3407 /* These regs are trashed by the hidden call. */
3408 # define __CALLER_SAVED_REGS \
3409  "$2", "$3", "$4", "$5", "$6", "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", "$25", "$31"
3410 
3411 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
3412  long) == 4. */
3413 
/* Zero-argument call: only saves gp/ra and makes the hidden call. */
3414 # define CALL_FN_W_v( lval, orig ) \
3415  do { \
3416  volatile OrigFn _orig = ( orig ); \
3417  volatile unsigned long _argvec[1]; \
3418  volatile unsigned long _res; \
3419  _argvec[0] = (unsigned long)_orig.nraddr; \
3420  __asm__ volatile( "subu $29, $29, 8 \n\t" \
3421  "sw $28, 0($29) \n\t" \
3422  "sw $31, 4($29) \n\t" \
3423  "subu $29, $29, 16 \n\t" \
3424  "lw $25, 0(%1) \n\t" /* target->t9 */ \
3425  VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16\n\t" \
3426  "lw $28, 0($29) \n\t" \
3427  "lw $31, 4($29) \n\t" \
3428  "addu $29, $29, 8 \n\t" \
3429  "move %0, $2\n" \
3430  : /*out*/ "=r"( _res ) \
3431  : /*in*/ "0"( &_argvec[0] ) \
3432  : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
3433  lval = (__typeof__( lval ))_res; \
3434  } while ( 0 )
3435 
/* One register argument ($4). */
3436 # define CALL_FN_W_W( lval, orig, arg1 ) \
3437  do { \
3438  volatile OrigFn _orig = ( orig ); \
3439  volatile unsigned long _argvec[2]; \
3440  volatile unsigned long _res; \
3441  _argvec[0] = (unsigned long)_orig.nraddr; \
3442  _argvec[1] = (unsigned long)( arg1 ); \
3443  __asm__ volatile( "subu $29, $29, 8 \n\t" \
3444  "sw $28, 0($29) \n\t" \
3445  "sw $31, 4($29) \n\t" \
3446  "subu $29, $29, 16 \n\t" \
3447  "lw $4, 4(%1) \n\t" /* arg1*/ \
3448  "lw $25, 0(%1) \n\t" /* target->t9 */ \
3449  VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \
3450  "lw $28, 0($29) \n\t" \
3451  "lw $31, 4($29) \n\t" \
3452  "addu $29, $29, 8 \n\t" \
3453  "move %0, $2\n" \
3454  : /*out*/ "=r"( _res ) \
3455  : /*in*/ "0"( &_argvec[0] ) \
3456  : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
3457  lval = (__typeof__( lval ))_res; \
3458  } while ( 0 )
3459 
/* Two register arguments ($4, $5). */
3460 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
3461  do { \
3462  volatile OrigFn _orig = ( orig ); \
3463  volatile unsigned long _argvec[3]; \
3464  volatile unsigned long _res; \
3465  _argvec[0] = (unsigned long)_orig.nraddr; \
3466  _argvec[1] = (unsigned long)( arg1 ); \
3467  _argvec[2] = (unsigned long)( arg2 ); \
3468  __asm__ volatile( "subu $29, $29, 8 \n\t" \
3469  "sw $28, 0($29) \n\t" \
3470  "sw $31, 4($29) \n\t" \
3471  "subu $29, $29, 16 \n\t" \
3472  "lw $4, 4(%1) \n\t" \
3473  "lw $5, 8(%1) \n\t" \
3474  "lw $25, 0(%1) \n\t" /* target->t9 */ \
3475  VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \
3476  "lw $28, 0($29) \n\t" \
3477  "lw $31, 4($29) \n\t" \
3478  "addu $29, $29, 8 \n\t" \
3479  "move %0, $2\n" \
3480  : /*out*/ "=r"( _res ) \
3481  : /*in*/ "0"( &_argvec[0] ) \
3482  : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
3483  lval = (__typeof__( lval ))_res; \
3484  } while ( 0 )
3485 
/* Three register arguments ($4-$6). */
3486 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
3487  do { \
3488  volatile OrigFn _orig = ( orig ); \
3489  volatile unsigned long _argvec[4]; \
3490  volatile unsigned long _res; \
3491  _argvec[0] = (unsigned long)_orig.nraddr; \
3492  _argvec[1] = (unsigned long)( arg1 ); \
3493  _argvec[2] = (unsigned long)( arg2 ); \
3494  _argvec[3] = (unsigned long)( arg3 ); \
3495  __asm__ volatile( "subu $29, $29, 8 \n\t" \
3496  "sw $28, 0($29) \n\t" \
3497  "sw $31, 4($29) \n\t" \
3498  "subu $29, $29, 16 \n\t" \
3499  "lw $4, 4(%1) \n\t" \
3500  "lw $5, 8(%1) \n\t" \
3501  "lw $6, 12(%1) \n\t" \
3502  "lw $25, 0(%1) \n\t" /* target->t9 */ \
3503  VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \
3504  "lw $28, 0($29) \n\t" \
3505  "lw $31, 4($29) \n\t" \
3506  "addu $29, $29, 8 \n\t" \
3507  "move %0, $2\n" \
3508  : /*out*/ "=r"( _res ) \
3509  : /*in*/ "0"( &_argvec[0] ) \
3510  : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
3511  lval = (__typeof__( lval ))_res; \
3512  } while ( 0 )
3513 
/* Four register arguments ($4-$7). */
3514 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
3515  do { \
3516  volatile OrigFn _orig = ( orig ); \
3517  volatile unsigned long _argvec[5]; \
3518  volatile unsigned long _res; \
3519  _argvec[0] = (unsigned long)_orig.nraddr; \
3520  _argvec[1] = (unsigned long)( arg1 ); \
3521  _argvec[2] = (unsigned long)( arg2 ); \
3522  _argvec[3] = (unsigned long)( arg3 ); \
3523  _argvec[4] = (unsigned long)( arg4 ); \
3524  __asm__ volatile( "subu $29, $29, 8 \n\t" \
3525  "sw $28, 0($29) \n\t" \
3526  "sw $31, 4($29) \n\t" \
3527  "subu $29, $29, 16 \n\t" \
3528  "lw $4, 4(%1) \n\t" \
3529  "lw $5, 8(%1) \n\t" \
3530  "lw $6, 12(%1) \n\t" \
3531  "lw $7, 16(%1) \n\t" \
3532  "lw $25, 0(%1) \n\t" /* target->t9 */ \
3533  VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \
3534  "lw $28, 0($29) \n\t" \
3535  "lw $31, 4($29) \n\t" \
3536  "addu $29, $29, 8 \n\t" \
3537  "move %0, $2\n" \
3538  : /*out*/ "=r"( _res ) \
3539  : /*in*/ "0"( &_argvec[0] ) \
3540  : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
3541  lval = (__typeof__( lval ))_res; \
3542  } while ( 0 )
3543 
/* Five arguments: arg5 is staged through $4 into the stack slot at
   16($29) before $4 is reloaded with arg1. */
3544 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
3545  do { \
3546  volatile OrigFn _orig = ( orig ); \
3547  volatile unsigned long _argvec[6]; \
3548  volatile unsigned long _res; \
3549  _argvec[0] = (unsigned long)_orig.nraddr; \
3550  _argvec[1] = (unsigned long)( arg1 ); \
3551  _argvec[2] = (unsigned long)( arg2 ); \
3552  _argvec[3] = (unsigned long)( arg3 ); \
3553  _argvec[4] = (unsigned long)( arg4 ); \
3554  _argvec[5] = (unsigned long)( arg5 ); \
3555  __asm__ volatile( "subu $29, $29, 8 \n\t" \
3556  "sw $28, 0($29) \n\t" \
3557  "sw $31, 4($29) \n\t" \
3558  "lw $4, 20(%1) \n\t" \
3559  "subu $29, $29, 24\n\t" \
3560  "sw $4, 16($29) \n\t" \
3561  "lw $4, 4(%1) \n\t" \
3562  "lw $5, 8(%1) \n\t" \
3563  "lw $6, 12(%1) \n\t" \
3564  "lw $7, 16(%1) \n\t" \
3565  "lw $25, 0(%1) \n\t" /* target->t9 */ \
3566  VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 24 \n\t" \
3567  "lw $28, 0($29) \n\t" \
3568  "lw $31, 4($29) \n\t" \
3569  "addu $29, $29, 8 \n\t" \
3570  "move %0, $2\n" \
3571  : /*out*/ "=r"( _res ) \
3572  : /*in*/ "0"( &_argvec[0] ) \
3573  : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
3574  lval = (__typeof__( lval ))_res; \
3575  } while ( 0 )
/* Six arguments: args 5-6 go to stack slots 16/20($29). */
3576 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
3577  do { \
3578  volatile OrigFn _orig = ( orig ); \
3579  volatile unsigned long _argvec[7]; \
3580  volatile unsigned long _res; \
3581  _argvec[0] = (unsigned long)_orig.nraddr; \
3582  _argvec[1] = (unsigned long)( arg1 ); \
3583  _argvec[2] = (unsigned long)( arg2 ); \
3584  _argvec[3] = (unsigned long)( arg3 ); \
3585  _argvec[4] = (unsigned long)( arg4 ); \
3586  _argvec[5] = (unsigned long)( arg5 ); \
3587  _argvec[6] = (unsigned long)( arg6 ); \
3588  __asm__ volatile( "subu $29, $29, 8 \n\t" \
3589  "sw $28, 0($29) \n\t" \
3590  "sw $31, 4($29) \n\t" \
3591  "lw $4, 20(%1) \n\t" \
3592  "subu $29, $29, 32\n\t" \
3593  "sw $4, 16($29) \n\t" \
3594  "lw $4, 24(%1) \n\t" \
3595  "nop\n\t" \
3596  "sw $4, 20($29) \n\t" \
3597  "lw $4, 4(%1) \n\t" \
3598  "lw $5, 8(%1) \n\t" \
3599  "lw $6, 12(%1) \n\t" \
3600  "lw $7, 16(%1) \n\t" \
3601  "lw $25, 0(%1) \n\t" /* target->t9 */ \
3602  VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 32 \n\t" \
3603  "lw $28, 0($29) \n\t" \
3604  "lw $31, 4($29) \n\t" \
3605  "addu $29, $29, 8 \n\t" \
3606  "move %0, $2\n" \
3607  : /*out*/ "=r"( _res ) \
3608  : /*in*/ "0"( &_argvec[0] ) \
3609  : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
3610  lval = (__typeof__( lval ))_res; \
3611  } while ( 0 )
3612 
/* Seven arguments: args 5-7 go to stack slots 16/20/24($29). */
3613 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
3614  do { \
3615  volatile OrigFn _orig = ( orig ); \
3616  volatile unsigned long _argvec[8]; \
3617  volatile unsigned long _res; \
3618  _argvec[0] = (unsigned long)_orig.nraddr; \
3619  _argvec[1] = (unsigned long)( arg1 ); \
3620  _argvec[2] = (unsigned long)( arg2 ); \
3621  _argvec[3] = (unsigned long)( arg3 ); \
3622  _argvec[4] = (unsigned long)( arg4 ); \
3623  _argvec[5] = (unsigned long)( arg5 ); \
3624  _argvec[6] = (unsigned long)( arg6 ); \
3625  _argvec[7] = (unsigned long)( arg7 ); \
3626  __asm__ volatile( "subu $29, $29, 8 \n\t" \
3627  "sw $28, 0($29) \n\t" \
3628  "sw $31, 4($29) \n\t" \
3629  "lw $4, 20(%1) \n\t" \
3630  "subu $29, $29, 32\n\t" \
3631  "sw $4, 16($29) \n\t" \
3632  "lw $4, 24(%1) \n\t" \
3633  "sw $4, 20($29) \n\t" \
3634  "lw $4, 28(%1) \n\t" \
3635  "sw $4, 24($29) \n\t" \
3636  "lw $4, 4(%1) \n\t" \
3637  "lw $5, 8(%1) \n\t" \
3638  "lw $6, 12(%1) \n\t" \
3639  "lw $7, 16(%1) \n\t" \
3640  "lw $25, 0(%1) \n\t" /* target->t9 */ \
3641  VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 32 \n\t" \
3642  "lw $28, 0($29) \n\t" \
3643  "lw $31, 4($29) \n\t" \
3644  "addu $29, $29, 8 \n\t" \
3645  "move %0, $2\n" \
3646  : /*out*/ "=r"( _res ) \
3647  : /*in*/ "0"( &_argvec[0] ) \
3648  : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
3649  lval = (__typeof__( lval ))_res; \
3650  } while ( 0 )
3651 
/* Eight arguments: stack area grows to 40 bytes for args 5-8. */
3652 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
3653  do { \
3654  volatile OrigFn _orig = ( orig ); \
3655  volatile unsigned long _argvec[9]; \
3656  volatile unsigned long _res; \
3657  _argvec[0] = (unsigned long)_orig.nraddr; \
3658  _argvec[1] = (unsigned long)( arg1 ); \
3659  _argvec[2] = (unsigned long)( arg2 ); \
3660  _argvec[3] = (unsigned long)( arg3 ); \
3661  _argvec[4] = (unsigned long)( arg4 ); \
3662  _argvec[5] = (unsigned long)( arg5 ); \
3663  _argvec[6] = (unsigned long)( arg6 ); \
3664  _argvec[7] = (unsigned long)( arg7 ); \
3665  _argvec[8] = (unsigned long)( arg8 ); \
3666  __asm__ volatile( "subu $29, $29, 8 \n\t" \
3667  "sw $28, 0($29) \n\t" \
3668  "sw $31, 4($29) \n\t" \
3669  "lw $4, 20(%1) \n\t" \
3670  "subu $29, $29, 40\n\t" \
3671  "sw $4, 16($29) \n\t" \
3672  "lw $4, 24(%1) \n\t" \
3673  "sw $4, 20($29) \n\t" \
3674  "lw $4, 28(%1) \n\t" \
3675  "sw $4, 24($29) \n\t" \
3676  "lw $4, 32(%1) \n\t" \
3677  "sw $4, 28($29) \n\t" \
3678  "lw $4, 4(%1) \n\t" \
3679  "lw $5, 8(%1) \n\t" \
3680  "lw $6, 12(%1) \n\t" \
3681  "lw $7, 16(%1) \n\t" \
3682  "lw $25, 0(%1) \n\t" /* target->t9 */ \
3683  VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 40 \n\t" \
3684  "lw $28, 0($29) \n\t" \
3685  "lw $31, 4($29) \n\t" \
3686  "addu $29, $29, 8 \n\t" \
3687  "move %0, $2\n" \
3688  : /*out*/ "=r"( _res ) \
3689  : /*in*/ "0"( &_argvec[0] ) \
3690  : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
3691  lval = (__typeof__( lval ))_res; \
3692  } while ( 0 )
3693 
/* Nine arguments: args 5-9 spilled to the 40-byte stack area. */
3694 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
3695  do { \
3696  volatile OrigFn _orig = ( orig ); \
3697  volatile unsigned long _argvec[10]; \
3698  volatile unsigned long _res; \
3699  _argvec[0] = (unsigned long)_orig.nraddr; \
3700  _argvec[1] = (unsigned long)( arg1 ); \
3701  _argvec[2] = (unsigned long)( arg2 ); \
3702  _argvec[3] = (unsigned long)( arg3 ); \
3703  _argvec[4] = (unsigned long)( arg4 ); \
3704  _argvec[5] = (unsigned long)( arg5 ); \
3705  _argvec[6] = (unsigned long)( arg6 ); \
3706  _argvec[7] = (unsigned long)( arg7 ); \
3707  _argvec[8] = (unsigned long)( arg8 ); \
3708  _argvec[9] = (unsigned long)( arg9 ); \
3709  __asm__ volatile( "subu $29, $29, 8 \n\t" \
3710  "sw $28, 0($29) \n\t" \
3711  "sw $31, 4($29) \n\t" \
3712  "lw $4, 20(%1) \n\t" \
3713  "subu $29, $29, 40\n\t" \
3714  "sw $4, 16($29) \n\t" \
3715  "lw $4, 24(%1) \n\t" \
3716  "sw $4, 20($29) \n\t" \
3717  "lw $4, 28(%1) \n\t" \
3718  "sw $4, 24($29) \n\t" \
3719  "lw $4, 32(%1) \n\t" \
3720  "sw $4, 28($29) \n\t" \
3721  "lw $4, 36(%1) \n\t" \
3722  "sw $4, 32($29) \n\t" \
3723  "lw $4, 4(%1) \n\t" \
3724  "lw $5, 8(%1) \n\t" \
3725  "lw $6, 12(%1) \n\t" \
3726  "lw $7, 16(%1) \n\t" \
3727  "lw $25, 0(%1) \n\t" /* target->t9 */ \
3728  VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 40 \n\t" \
3729  "lw $28, 0($29) \n\t" \
3730  "lw $31, 4($29) \n\t" \
3731  "addu $29, $29, 8 \n\t" \
3732  "move %0, $2\n" \
3733  : /*out*/ "=r"( _res ) \
3734  : /*in*/ "0"( &_argvec[0] ) \
3735  : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
3736  lval = (__typeof__( lval ))_res; \
3737  } while ( 0 )
3738 
/* Ten arguments: stack area grows to 48 bytes for args 5-10. */
3739 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
3740  do { \
3741  volatile OrigFn _orig = ( orig ); \
3742  volatile unsigned long _argvec[11]; \
3743  volatile unsigned long _res; \
3744  _argvec[0] = (unsigned long)_orig.nraddr; \
3745  _argvec[1] = (unsigned long)( arg1 ); \
3746  _argvec[2] = (unsigned long)( arg2 ); \
3747  _argvec[3] = (unsigned long)( arg3 ); \
3748  _argvec[4] = (unsigned long)( arg4 ); \
3749  _argvec[5] = (unsigned long)( arg5 ); \
3750  _argvec[6] = (unsigned long)( arg6 ); \
3751  _argvec[7] = (unsigned long)( arg7 ); \
3752  _argvec[8] = (unsigned long)( arg8 ); \
3753  _argvec[9] = (unsigned long)( arg9 ); \
3754  _argvec[10] = (unsigned long)( arg10 ); \
3755  __asm__ volatile( "subu $29, $29, 8 \n\t" \
3756  "sw $28, 0($29) \n\t" \
3757  "sw $31, 4($29) \n\t" \
3758  "lw $4, 20(%1) \n\t" \
3759  "subu $29, $29, 48\n\t" \
3760  "sw $4, 16($29) \n\t" \
3761  "lw $4, 24(%1) \n\t" \
3762  "sw $4, 20($29) \n\t" \
3763  "lw $4, 28(%1) \n\t" \
3764  "sw $4, 24($29) \n\t" \
3765  "lw $4, 32(%1) \n\t" \
3766  "sw $4, 28($29) \n\t" \
3767  "lw $4, 36(%1) \n\t" \
3768  "sw $4, 32($29) \n\t" \
3769  "lw $4, 40(%1) \n\t" \
3770  "sw $4, 36($29) \n\t" \
3771  "lw $4, 4(%1) \n\t" \
3772  "lw $5, 8(%1) \n\t" \
3773  "lw $6, 12(%1) \n\t" \
3774  "lw $7, 16(%1) \n\t" \
3775  "lw $25, 0(%1) \n\t" /* target->t9 */ \
3776  VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 48 \n\t" \
3777  "lw $28, 0($29) \n\t" \
3778  "lw $31, 4($29) \n\t" \
3779  "addu $29, $29, 8 \n\t" \
3780  "move %0, $2\n" \
3781  : /*out*/ "=r"( _res ) \
3782  : /*in*/ "0"( &_argvec[0] ) \
3783  : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
3784  lval = (__typeof__( lval ))_res; \
3785  } while ( 0 )
3786 
/* Eleven arguments: args 5-11 spilled to the 48-byte stack area. */
3787 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
3788  do { \
3789  volatile OrigFn _orig = ( orig ); \
3790  volatile unsigned long _argvec[12]; \
3791  volatile unsigned long _res; \
3792  _argvec[0] = (unsigned long)_orig.nraddr; \
3793  _argvec[1] = (unsigned long)( arg1 ); \
3794  _argvec[2] = (unsigned long)( arg2 ); \
3795  _argvec[3] = (unsigned long)( arg3 ); \
3796  _argvec[4] = (unsigned long)( arg4 ); \
3797  _argvec[5] = (unsigned long)( arg5 ); \
3798  _argvec[6] = (unsigned long)( arg6 ); \
3799  _argvec[7] = (unsigned long)( arg7 ); \
3800  _argvec[8] = (unsigned long)( arg8 ); \
3801  _argvec[9] = (unsigned long)( arg9 ); \
3802  _argvec[10] = (unsigned long)( arg10 ); \
3803  _argvec[11] = (unsigned long)( arg11 ); \
3804  __asm__ volatile( "subu $29, $29, 8 \n\t" \
3805  "sw $28, 0($29) \n\t" \
3806  "sw $31, 4($29) \n\t" \
3807  "lw $4, 20(%1) \n\t" \
3808  "subu $29, $29, 48\n\t" \
3809  "sw $4, 16($29) \n\t" \
3810  "lw $4, 24(%1) \n\t" \
3811  "sw $4, 20($29) \n\t" \
3812  "lw $4, 28(%1) \n\t" \
3813  "sw $4, 24($29) \n\t" \
3814  "lw $4, 32(%1) \n\t" \
3815  "sw $4, 28($29) \n\t" \
3816  "lw $4, 36(%1) \n\t" \
3817  "sw $4, 32($29) \n\t" \
3818  "lw $4, 40(%1) \n\t" \
3819  "sw $4, 36($29) \n\t" \
3820  "lw $4, 44(%1) \n\t" \
3821  "sw $4, 40($29) \n\t" \
3822  "lw $4, 4(%1) \n\t" \
3823  "lw $5, 8(%1) \n\t" \
3824  "lw $6, 12(%1) \n\t" \
3825  "lw $7, 16(%1) \n\t" \
3826  "lw $25, 0(%1) \n\t" /* target->t9 */ \
3827  VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 48 \n\t" \
3828  "lw $28, 0($29) \n\t" \
3829  "lw $31, 4($29) \n\t" \
3830  "addu $29, $29, 8 \n\t" \
3831  "move %0, $2\n" \
3832  : /*out*/ "=r"( _res ) \
3833  : /*in*/ "0"( &_argvec[0] ) \
3834  : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
3835  lval = (__typeof__( lval ))_res; \
3836  } while ( 0 )
3837 
/* Twelve arguments: 56-byte stack area; note the plain "r" input
   constraint here (see NOTE at top of section). */
3838 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
3839  do { \
3840  volatile OrigFn _orig = ( orig ); \
3841  volatile unsigned long _argvec[13]; \
3842  volatile unsigned long _res; \
3843  _argvec[0] = (unsigned long)_orig.nraddr; \
3844  _argvec[1] = (unsigned long)( arg1 ); \
3845  _argvec[2] = (unsigned long)( arg2 ); \
3846  _argvec[3] = (unsigned long)( arg3 ); \
3847  _argvec[4] = (unsigned long)( arg4 ); \
3848  _argvec[5] = (unsigned long)( arg5 ); \
3849  _argvec[6] = (unsigned long)( arg6 ); \
3850  _argvec[7] = (unsigned long)( arg7 ); \
3851  _argvec[8] = (unsigned long)( arg8 ); \
3852  _argvec[9] = (unsigned long)( arg9 ); \
3853  _argvec[10] = (unsigned long)( arg10 ); \
3854  _argvec[11] = (unsigned long)( arg11 ); \
3855  _argvec[12] = (unsigned long)( arg12 ); \
3856  __asm__ volatile( "subu $29, $29, 8 \n\t" \
3857  "sw $28, 0($29) \n\t" \
3858  "sw $31, 4($29) \n\t" \
3859  "lw $4, 20(%1) \n\t" \
3860  "subu $29, $29, 56\n\t" \
3861  "sw $4, 16($29) \n\t" \
3862  "lw $4, 24(%1) \n\t" \
3863  "sw $4, 20($29) \n\t" \
3864  "lw $4, 28(%1) \n\t" \
3865  "sw $4, 24($29) \n\t" \
3866  "lw $4, 32(%1) \n\t" \
3867  "sw $4, 28($29) \n\t" \
3868  "lw $4, 36(%1) \n\t" \
3869  "sw $4, 32($29) \n\t" \
3870  "lw $4, 40(%1) \n\t" \
3871  "sw $4, 36($29) \n\t" \
3872  "lw $4, 44(%1) \n\t" \
3873  "sw $4, 40($29) \n\t" \
3874  "lw $4, 48(%1) \n\t" \
3875  "sw $4, 44($29) \n\t" \
3876  "lw $4, 4(%1) \n\t" \
3877  "lw $5, 8(%1) \n\t" \
3878  "lw $6, 12(%1) \n\t" \
3879  "lw $7, 16(%1) \n\t" \
3880  "lw $25, 0(%1) \n\t" /* target->t9 */ \
3881  VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 56 \n\t" \
3882  "lw $28, 0($29) \n\t" \
3883  "lw $31, 4($29) \n\t" \
3884  "addu $29, $29, 8 \n\t" \
3885  "move %0, $2\n" \
3886  : /*out*/ "=r"( _res ) \
3887  : /*in*/ "r"( &_argvec[0] ) \
3888  : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
3889  lval = (__typeof__( lval ))_res; \
3890  } while ( 0 )
3891 
3892 # endif /* PLAT_mips32_linux */
3893 
3894 /* ------------------------- mips64-linux ------------------------- */
3895 
3896 # if defined( PLAT_mips64_linux )
3897 
3898 /* These regs are trashed by the hidden call. */
3899 # define __CALLER_SAVED_REGS \
3900  "$2", "$3", "$4", "$5", "$6", "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", "$25", "$31"
3901 
3902 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
3903  long) == 4. */
3904 
3905 # define CALL_FN_W_v( lval, orig ) \
3906  do { \
3907  volatile OrigFn _orig = ( orig ); \
3908  volatile unsigned long _argvec[1]; \
3909  volatile unsigned long _res; \
3910  _argvec[0] = (unsigned long)_orig.nraddr; \
3911  __asm__ volatile( "ld $25, 0(%1)\n\t" /* target->t9 */ \
3912  VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
3913  : /*out*/ "=r"( _res ) \
3914  : /*in*/ "0"( &_argvec[0] ) \
3915  : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
3916  lval = (__typeof__( lval ))_res; \
3917  } while ( 0 )
3918 
3919 # define CALL_FN_W_W( lval, orig, arg1 ) \
3920  do { \
3921  volatile OrigFn _orig = ( orig ); \
3922  volatile unsigned long _argvec[2]; \
3923  volatile unsigned long _res; \
3924  _argvec[0] = (unsigned long)_orig.nraddr; \
3925  _argvec[1] = (unsigned long)( arg1 ); \
3926  __asm__ volatile( "ld $4, 8(%1)\n\t" /* arg1*/ \
3927  "ld $25, 0(%1)\n\t" /* target->t9 */ \
3928  VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
3929  : /*out*/ "=r"( _res ) \
3930  : /*in*/ "r"( &_argvec[0] ) \
3931  : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
3932  lval = (__typeof__( lval ))_res; \
3933  } while ( 0 )
3934 
/* Call a 2-argument function: args in $4-$5; result from $2. */
# define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
    do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      __asm__ volatile( "ld $4, 8(%1)\n\t" \
                        "ld $5, 16(%1)\n\t" \
                        "ld $25, 0(%1)\n\t" /* target->t9 */ \
                        VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
                        : /*out*/ "=r"( _res ) \
                        : /*in*/ "r"( &_argvec[0] ) \
                        : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
      lval = (__typeof__( lval ))_res; \
    } while ( 0 )
3952 
/* Call a 3-argument function: args in $4-$6; result from $2. */
# define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
    do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      __asm__ volatile( "ld $4, 8(%1)\n\t" \
                        "ld $5, 16(%1)\n\t" \
                        "ld $6, 24(%1)\n\t" \
                        "ld $25, 0(%1)\n\t" /* target->t9 */ \
                        VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
                        : /*out*/ "=r"( _res ) \
                        : /*in*/ "r"( &_argvec[0] ) \
                        : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
      lval = (__typeof__( lval ))_res; \
    } while ( 0 )
3972 
/* Call a 4-argument function: args in $4-$7; result from $2. */
# define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
    do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      __asm__ volatile( "ld $4, 8(%1)\n\t" \
                        "ld $5, 16(%1)\n\t" \
                        "ld $6, 24(%1)\n\t" \
                        "ld $7, 32(%1)\n\t" \
                        "ld $25, 0(%1)\n\t" /* target->t9 */ \
                        VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
                        : /*out*/ "=r"( _res ) \
                        : /*in*/ "r"( &_argvec[0] ) \
                        : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
      lval = (__typeof__( lval ))_res; \
    } while ( 0 )
3994 
/* Call a 5-argument function: args in $4-$8; result from $2. */
# define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
    do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      _argvec[5] = (unsigned long)( arg5 ); \
      __asm__ volatile( "ld $4, 8(%1)\n\t" \
                        "ld $5, 16(%1)\n\t" \
                        "ld $6, 24(%1)\n\t" \
                        "ld $7, 32(%1)\n\t" \
                        "ld $8, 40(%1)\n\t" \
                        "ld $25, 0(%1)\n\t" /* target->t9 */ \
                        VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
                        : /*out*/ "=r"( _res ) \
                        : /*in*/ "r"( &_argvec[0] ) \
                        : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
      lval = (__typeof__( lval ))_res; \
    } while ( 0 )
4018 
/* Call a 6-argument function: args in $4-$9; result from $2. */
# define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
    do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      _argvec[5] = (unsigned long)( arg5 ); \
      _argvec[6] = (unsigned long)( arg6 ); \
      __asm__ volatile( "ld $4, 8(%1)\n\t" \
                        "ld $5, 16(%1)\n\t" \
                        "ld $6, 24(%1)\n\t" \
                        "ld $7, 32(%1)\n\t" \
                        "ld $8, 40(%1)\n\t" \
                        "ld $9, 48(%1)\n\t" \
                        "ld $25, 0(%1)\n\t" /* target->t9 */ \
                        VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
                        : /*out*/ "=r"( _res ) \
                        : /*in*/ "r"( &_argvec[0] ) \
                        : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
      lval = (__typeof__( lval ))_res; \
    } while ( 0 )
4044 
/* Call a 7-argument function: args in $4-$10; result from $2. */
# define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
    do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      _argvec[5] = (unsigned long)( arg5 ); \
      _argvec[6] = (unsigned long)( arg6 ); \
      _argvec[7] = (unsigned long)( arg7 ); \
      __asm__ volatile( "ld $4, 8(%1)\n\t" \
                        "ld $5, 16(%1)\n\t" \
                        "ld $6, 24(%1)\n\t" \
                        "ld $7, 32(%1)\n\t" \
                        "ld $8, 40(%1)\n\t" \
                        "ld $9, 48(%1)\n\t" \
                        "ld $10, 56(%1)\n\t" \
                        "ld $25, 0(%1) \n\t" /* target->t9 */ \
                        VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
                        : /*out*/ "=r"( _res ) \
                        : /*in*/ "r"( &_argvec[0] ) \
                        : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
      lval = (__typeof__( lval ))_res; \
    } while ( 0 )
4072 
/* Call an 8-argument function: args fill all register slots $4-$11; result from $2. */
# define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
    do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      _argvec[5] = (unsigned long)( arg5 ); \
      _argvec[6] = (unsigned long)( arg6 ); \
      _argvec[7] = (unsigned long)( arg7 ); \
      _argvec[8] = (unsigned long)( arg8 ); \
      __asm__ volatile( "ld $4, 8(%1)\n\t" \
                        "ld $5, 16(%1)\n\t" \
                        "ld $6, 24(%1)\n\t" \
                        "ld $7, 32(%1)\n\t" \
                        "ld $8, 40(%1)\n\t" \
                        "ld $9, 48(%1)\n\t" \
                        "ld $10, 56(%1)\n\t" \
                        "ld $11, 64(%1)\n\t" \
                        "ld $25, 0(%1) \n\t" /* target->t9 */ \
                        VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
                        : /*out*/ "=r"( _res ) \
                        : /*in*/ "r"( &_argvec[0] ) \
                        : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
      lval = (__typeof__( lval ))_res; \
    } while ( 0 )
4102 
/* Call a 9-argument function: args 1-8 in $4-$11; arg9 is spilled to the
   stack -- $29 (sp) is dropped by 8 bytes, arg9 stored there, and sp is
   restored after the call. */
# define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
    do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      _argvec[5] = (unsigned long)( arg5 ); \
      _argvec[6] = (unsigned long)( arg6 ); \
      _argvec[7] = (unsigned long)( arg7 ); \
      _argvec[8] = (unsigned long)( arg8 ); \
      _argvec[9] = (unsigned long)( arg9 ); \
      __asm__ volatile( "dsubu $29, $29, 8\n\t" \
                        "ld $4, 72(%1)\n\t" \
                        "sd $4, 0($29)\n\t" \
                        "ld $4, 8(%1)\n\t" \
                        "ld $5, 16(%1)\n\t" \
                        "ld $6, 24(%1)\n\t" \
                        "ld $7, 32(%1)\n\t" \
                        "ld $8, 40(%1)\n\t" \
                        "ld $9, 48(%1)\n\t" \
                        "ld $10, 56(%1)\n\t" \
                        "ld $11, 64(%1)\n\t" \
                        "ld $25, 0(%1)\n\t" /* target->t9 */ \
                        VALGRIND_CALL_NOREDIR_T9 "daddu $29, $29, 8\n\t" \
                        "move %0, $2\n" \
                        : /*out*/ "=r"( _res ) \
                        : /*in*/ "r"( &_argvec[0] ) \
                        : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
      lval = (__typeof__( lval ))_res; \
    } while ( 0 )
4137 
/* Call a 10-argument function: args 1-8 in $4-$11; args 9-10 spilled to
   two 8-byte stack slots (sp dropped by 16, restored after the call). */
# define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
    do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      _argvec[5] = (unsigned long)( arg5 ); \
      _argvec[6] = (unsigned long)( arg6 ); \
      _argvec[7] = (unsigned long)( arg7 ); \
      _argvec[8] = (unsigned long)( arg8 ); \
      _argvec[9] = (unsigned long)( arg9 ); \
      _argvec[10] = (unsigned long)( arg10 ); \
      __asm__ volatile( "dsubu $29, $29, 16\n\t" \
                        "ld $4, 72(%1)\n\t" \
                        "sd $4, 0($29)\n\t" \
                        "ld $4, 80(%1)\n\t" \
                        "sd $4, 8($29)\n\t" \
                        "ld $4, 8(%1)\n\t" \
                        "ld $5, 16(%1)\n\t" \
                        "ld $6, 24(%1)\n\t" \
                        "ld $7, 32(%1)\n\t" \
                        "ld $8, 40(%1)\n\t" \
                        "ld $9, 48(%1)\n\t" \
                        "ld $10, 56(%1)\n\t" \
                        "ld $11, 64(%1)\n\t" \
                        "ld $25, 0(%1)\n\t" /* target->t9 */ \
                        VALGRIND_CALL_NOREDIR_T9 "daddu $29, $29, 16\n\t" \
                        "move %0, $2\n" \
                        : /*out*/ "=r"( _res ) \
                        : /*in*/ "r"( &_argvec[0] ) \
                        : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
      lval = (__typeof__( lval ))_res; \
    } while ( 0 )
4175 
/* Call an 11-argument function: args 1-8 in $4-$11; args 9-11 spilled to
   three 8-byte stack slots (sp dropped by 24, restored after the call). */
# define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
    do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      _argvec[5] = (unsigned long)( arg5 ); \
      _argvec[6] = (unsigned long)( arg6 ); \
      _argvec[7] = (unsigned long)( arg7 ); \
      _argvec[8] = (unsigned long)( arg8 ); \
      _argvec[9] = (unsigned long)( arg9 ); \
      _argvec[10] = (unsigned long)( arg10 ); \
      _argvec[11] = (unsigned long)( arg11 ); \
      __asm__ volatile( "dsubu $29, $29, 24\n\t" \
                        "ld $4, 72(%1)\n\t" \
                        "sd $4, 0($29)\n\t" \
                        "ld $4, 80(%1)\n\t" \
                        "sd $4, 8($29)\n\t" \
                        "ld $4, 88(%1)\n\t" \
                        "sd $4, 16($29)\n\t" \
                        "ld $4, 8(%1)\n\t" \
                        "ld $5, 16(%1)\n\t" \
                        "ld $6, 24(%1)\n\t" \
                        "ld $7, 32(%1)\n\t" \
                        "ld $8, 40(%1)\n\t" \
                        "ld $9, 48(%1)\n\t" \
                        "ld $10, 56(%1)\n\t" \
                        "ld $11, 64(%1)\n\t" \
                        "ld $25, 0(%1)\n\t" /* target->t9 */ \
                        VALGRIND_CALL_NOREDIR_T9 "daddu $29, $29, 24\n\t" \
                        "move %0, $2\n" \
                        : /*out*/ "=r"( _res ) \
                        : /*in*/ "r"( &_argvec[0] ) \
                        : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
      lval = (__typeof__( lval ))_res; \
    } while ( 0 )
4216 
/* Call a 12-argument function: args 1-8 in $4-$11; args 9-12 spilled to
   four 8-byte stack slots (sp dropped by 32, restored after the call). */
# define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
    do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      _argvec[5] = (unsigned long)( arg5 ); \
      _argvec[6] = (unsigned long)( arg6 ); \
      _argvec[7] = (unsigned long)( arg7 ); \
      _argvec[8] = (unsigned long)( arg8 ); \
      _argvec[9] = (unsigned long)( arg9 ); \
      _argvec[10] = (unsigned long)( arg10 ); \
      _argvec[11] = (unsigned long)( arg11 ); \
      _argvec[12] = (unsigned long)( arg12 ); \
      __asm__ volatile( "dsubu $29, $29, 32\n\t" \
                        "ld $4, 72(%1)\n\t" \
                        "sd $4, 0($29)\n\t" \
                        "ld $4, 80(%1)\n\t" \
                        "sd $4, 8($29)\n\t" \
                        "ld $4, 88(%1)\n\t" \
                        "sd $4, 16($29)\n\t" \
                        "ld $4, 96(%1)\n\t" \
                        "sd $4, 24($29)\n\t" \
                        "ld $4, 8(%1)\n\t" \
                        "ld $5, 16(%1)\n\t" \
                        "ld $6, 24(%1)\n\t" \
                        "ld $7, 32(%1)\n\t" \
                        "ld $8, 40(%1)\n\t" \
                        "ld $9, 48(%1)\n\t" \
                        "ld $10, 56(%1)\n\t" \
                        "ld $11, 64(%1)\n\t" \
                        "ld $25, 0(%1)\n\t" /* target->t9 */ \
                        VALGRIND_CALL_NOREDIR_T9 "daddu $29, $29, 32\n\t" \
                        "move %0, $2\n" \
                        : /*out*/ "=r"( _res ) \
                        : /*in*/ "r"( &_argvec[0] ) \
                        : /*trash*/ "memory", __CALLER_SAVED_REGS ); \
      lval = (__typeof__( lval ))_res; \
    } while ( 0 )
4260 
4261 # endif /* PLAT_mips64_linux */
4262 
4263 /* ------------------------------------------------------------------ */
4264 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
4265 /* */
4266 /* ------------------------------------------------------------------ */
4267 
4268 /* Some request codes. There are many more of these, but most are not
4269  exposed to end-user view. These are the public ones, all of the
4270  form 0x1000 + small_number.
4271 
4272  Core ones are in the range 0x00000000--0x0000ffff. The non-public
4273  ones start at 0x2000.
4274 */
4275 
/* These macros are used by tools -- they must be public, but don't
   embed them into other programs. */
/* Build a tool-unique request-code base: two identifying chars packed
   into bits 31-24 and 23-16 of an unsigned int. */
# define VG_USERREQ_TOOL_BASE( a, b ) ( (unsigned int)( ( (a)&0xff ) << 24 | ( (b)&0xff ) << 16 ) )
/* True iff request code v belongs to the tool identified by (a,b),
   i.e. its top 16 bits match that tool's base. */
# define VG_IS_TOOL_USERREQ( a, b, v ) ( VG_USERREQ_TOOL_BASE( a, b ) == ( (v)&0xffff0000 ) )
4280 
4281 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
4282  This enum comprises an ABI exported by Valgrind to programs
4283  which use client requests. DO NOT CHANGE THE ORDER OF THESE
4284  ENTRIES, NOR DELETE ANY -- add new ones at the end. */
4285 typedef enum {
4288 
4289  /* These allow any function to be called from the simulated
4290  CPU but run on the real CPU. Nb: the first arg passed to
4291  the function is always the ThreadId of the running
4292  thread! So CLIENT_CALL0 actually requires a 1 arg
4293  function, etc. */
4298 
4299  /* Can be useful in regression testing suites -- eg. can
4300  send Valgrind's output to /dev/null and still count
4301  errors. */
4303 
4304  /* Allows the client program and/or gdbserver to execute a monitor
4305  command. */
4307 
4308  /* These are useful and can be interpreted by any tool that
4309  tracks malloc() et al, by using vg_replace_malloc.c. */
4313  /* Memory pool support. */
4322 
4323  /* Allow printfs to valgrind log. */
4324  /* The first two pass the va_list argument by value, which
4325  assumes it is the same size as or smaller than a UWord,
4326  which generally isn't the case. Hence are deprecated.
4327  The second two pass the vargs by reference and so are
4328  immune to this problem. */
4329  /* both :: char* fmt, va_list vargs (DEPRECATED) */
4332  /* both :: char* fmt, va_list* vargs */
4335 
4336  /* Stack support. */
4340 
4341  /* Wine support */
4343 
4344  /* Querying of debug info. */
4346 
4347  /* Disable/enable error reporting level. Takes a single
4348  Word arg which is the delta to this thread's error
4349  disablement indicator. Hence 1 disables or further
4350  disables errors, and -1 moves back towards enablement.
4351  Other values are not allowed. */
4353 
4354  /* Initialise IR injection */
4357 
/* __extension__ is a GCC-ism; make it a no-op for other compilers. */
# if !defined( __GNUC__ )
#   define __extension__ /* */
# endif
4361 
/* Returns the number of Valgrinds this code is running under. That
   is, 0 if running natively, 1 if running under Valgrind, 2 if
   running under Valgrind which is running under another Valgrind,
   etc. */
# define RUNNING_ON_VALGRIND \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 /* if not */, VG_USERREQ__RUNNING_ON_VALGRIND, 0, 0, 0, 0, 0 )
4368 
/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
   _qzz_len - 1]. Useful if you are debugging a JITter or some such,
   since it provides a way to make sure valgrind will retranslate the
   invalidated area. Returns no value. */
# define VALGRIND_DISCARD_TRANSLATIONS( _qzz_addr, _qzz_len ) \
    VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__DISCARD_TRANSLATIONS, _qzz_addr, _qzz_len, 0, 0, 0 )
4375 
/* These requests are for getting Valgrind itself to print something.
   Possibly with a backtrace. This is a really ugly hack. The return value
   is the number of characters printed, excluding the "**<pid>** " part at the
   start and the backtrace (if present). */

# if defined( __GNUC__ ) || defined( __INTEL_COMPILER ) && !defined( _MSC_VER )
/* Modern GCC will optimize the static routine out if unused,
   and unused attribute will shut down warnings about it. */
static int VALGRIND_PRINTF( const char* format, ... ) __attribute__( ( format( __printf__, 1, 2 ), __unused__ ) );
# endif
/* Print a printf-style message into the Valgrind log.  Returns the number
   of characters printed; returns 0 when compiled with NVALGRIND (all
   Valgrind support stripped out). */
static int
# if defined( _MSC_VER )
    __inline
# endif
    VALGRIND_PRINTF( const char* format, ... ) {
# if defined( NVALGRIND )
  return 0;
# else /* NVALGRIND */
# if defined( _MSC_VER ) || defined( __MINGW64__ )
  uintptr_t _qzz_res;
# else
  unsigned long _qzz_res;
# endif
  va_list vargs;
  va_start( vargs, format );
  /* The va_list is passed by reference (&vargs): passing it by value is
     not portable -- see the VALIST_BY_REF comments earlier in this file. */
# if defined( _MSC_VER ) || defined( __MINGW64__ )
  _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__PRINTF_VALIST_BY_REF, (uintptr_t)format, (uintptr_t)&vargs,
                                              0, 0, 0 );
# else
  _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__PRINTF_VALIST_BY_REF, (unsigned long)format,
                                              (unsigned long)&vargs, 0, 0, 0 );
# endif
  va_end( vargs );
  return (int)_qzz_res;
# endif /* NVALGRIND */
}
4412 
4413 # if defined( __GNUC__ ) || defined( __INTEL_COMPILER ) && !defined( _MSC_VER )
4414 static int VALGRIND_PRINTF_BACKTRACE( const char* format, ... )
4415  __attribute__( ( format( __printf__, 1, 2 ), __unused__ ) );
4416 # endif
4417 static int
4418 # if defined( _MSC_VER )
4419  __inline
4420 # endif
4421  VALGRIND_PRINTF_BACKTRACE( const char* format, ... ) {
4422 # if defined( NVALGRIND )
4423  return 0;
4424 # else /* NVALGRIND */
4425 # if defined( _MSC_VER ) || defined( __MINGW64__ )
4426  uintptr_t _qzz_res;
4427 # else
4428  unsigned long _qzz_res;
4429 # endif
4430  va_list vargs;
4431  va_start( vargs, format );
4432 # if defined( _MSC_VER ) || defined( __MINGW64__ )
4434  (uintptr_t)&vargs, 0, 0, 0 );
4435 # else
4437  (unsigned long)&vargs, 0, 0, 0 );
4438 # endif
4439  va_end( vargs );
4440  return (int)_qzz_res;
4441 # endif /* NVALGRIND */
4442 }
4443 
4444 /* These requests allow control to move from the simulated CPU to the
   real CPU, calling an arbitrary function.
4446 
4447  Note that the current ThreadId is inserted as the first argument.
4448  So this call:
4449 
4450  VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
4451 
4452  requires f to have this signature:
4453 
4454  Word f(Word tid, Word arg1, Word arg2)
4455 
4456  where "Word" is a word-sized type.
4457 
4458  Note that these client requests are not entirely reliable. For example,
4459  if you call a function with them that subsequently calls printf(),
4460  there's a high chance Valgrind will crash. Generally, your prospects of
4461  these working are made higher if the called function does not refer to
4462  any global variables, and does not refer to any libc or other functions
4463  (printf et al). Any kind of entanglement with libc or dynamic linking is
4464  likely to have a bad outcome, for tricky reasons which we've grappled
4465  with a lot in the past.
4466 */
/* Run _qyy_fn on the real CPU with the current ThreadId prepended as its
   first argument (see the comment block above); default return is 0 when
   not under Valgrind. */
# define VALGRIND_NON_SIMD_CALL0( _qyy_fn ) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 /* default return */, VG_USERREQ__CLIENT_CALL0, _qyy_fn, 0, 0, 0, 0 )

# define VALGRIND_NON_SIMD_CALL1( _qyy_fn, _qyy_arg1 ) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 /* default return */, VG_USERREQ__CLIENT_CALL1, _qyy_fn, _qyy_arg1, 0, 0, 0 )

# define VALGRIND_NON_SIMD_CALL2( _qyy_fn, _qyy_arg1, _qyy_arg2 ) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 /* default return */, VG_USERREQ__CLIENT_CALL2, _qyy_fn, _qyy_arg1, _qyy_arg2, \
                                     0, 0 )

# define VALGRIND_NON_SIMD_CALL3( _qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3 ) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 /* default return */, VG_USERREQ__CLIENT_CALL3, _qyy_fn, _qyy_arg1, _qyy_arg2, \
                                     _qyy_arg3, 0 )
4480 
/* Counts the number of errors that have been recorded by a tool. Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted. */
# define VALGRIND_COUNT_ERRORS \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 /* default return */, VG_USERREQ__COUNT_ERRORS, 0, 0, 0, 0, 0 )
4486 
4487 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
4488  when heap blocks are allocated in order to give accurate results. This
4489  happens automatically for the standard allocator functions such as
4490  malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
4491  delete[], etc.
4492 
4493  But if your program uses a custom allocator, this doesn't automatically
   happen, and Valgrind will not do as well. For example, if you allocate
   superblocks with mmap() and then allocate chunks of the superblocks, all
4496  Valgrind's observations will be at the mmap() level and it won't know that
4497  the chunks should be considered separate entities. In Memcheck's case,
4498  that means you probably won't get heap block overrun detection (because
4499  there won't be redzones marked as unaddressable) and you definitely won't
4500  get any leak detection.
4501 
4502  The following client requests allow a custom allocator to be annotated so
4503  that it can be handled accurately by Valgrind.
4504 
4505  VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
4506  by a malloc()-like function. For Memcheck (an illustrative case), this
4507  does two things:
4508 
4509  - It records that the block has been allocated. This means any addresses
4510  within the block mentioned in error messages will be
4511  identified as belonging to the block. It also means that if the block
4512  isn't freed it will be detected by the leak checker.
4513 
4514  - It marks the block as being addressable and undefined (if 'is_zeroed' is
4515  not set), or addressable and defined (if 'is_zeroed' is set). This
4516  controls how accesses to the block by the program are handled.
4517 
4518  'addr' is the start of the usable block (ie. after any
4519  redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
4520  can apply redzones -- these are blocks of padding at the start and end of
4521  each block. Adding redzones is recommended as it makes it much more likely
4522  Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
4523  zeroed (or filled with another predictable value), as is the case for
4524  calloc().
4525 
4526  VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
4527  heap block -- that will be used by the client program -- is allocated.
4528  It's best to put it at the outermost level of the allocator if possible;
4529  for example, if you have a function my_alloc() which calls
4530  internal_alloc(), and the client request is put inside internal_alloc(),
4531  stack traces relating to the heap block will contain entries for both
4532  my_alloc() and internal_alloc(), which is probably not what you want.
4533 
4534  For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
4535  custom blocks from within a heap block, B, that has been allocated with
4536  malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
4537  -- the custom blocks will take precedence.
4538 
4539  VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
4540  Memcheck, it does two things:
4541 
4542  - It records that the block has been deallocated. This assumes that the
4543  block was annotated as having been allocated via
4544  VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
4545 
4546  - It marks the block as being unaddressable.
4547 
4548  VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
4549  heap block is deallocated.
4550 
4551  VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
4552  Memcheck, it does four things:
4553 
4554  - It records that the size of a block has been changed. This assumes that
4555  the block was annotated as having been allocated via
4556  VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
4557 
4558  - If the block shrunk, it marks the freed memory as being unaddressable.
4559 
4560  - If the block grew, it marks the new area as undefined and defines a red
4561  zone past the end of the new block.
4562 
4563  - The V-bits of the overlap between the old and the new block are preserved.
4564 
4565  VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
4566  and before deallocation of the old block.
4567 
4568  In many cases, these three client requests will not be enough to get your
4569  allocator working well with Memcheck. More specifically, if your allocator
4570  writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
4571  will be necessary to mark the memory as addressable just before the zeroing
4572  occurs, otherwise you'll get a lot of invalid write errors. For example,
4573  you'll need to do this if your allocator recycles freed blocks, but it
4574  zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
4575  Alternatively, if your allocator reuses freed blocks for allocator-internal
4576  data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
4577 
4578  Really, what's happening is a blurring of the lines between the client
4579  program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
4580  memory should be considered unaddressable to the client program, but the
4581  allocator knows more than the rest of the client program and so may be able
4582  to safely access it. Extra client requests are necessary for Valgrind to
4583  understand the distinction between the allocator and the rest of the
4584  program.
4585 
4586  Ignored if addr == 0.
4587 */
/* Annotate a custom-allocator block as allocated; see the long comment
   block above for the full contract.  Ignored if addr == 0. */
# define VALGRIND_MALLOCLIKE_BLOCK( addr, sizeB, rzB, is_zeroed ) \
    VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MALLOCLIKE_BLOCK, addr, sizeB, rzB, is_zeroed, 0 )

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0. */
# define VALGRIND_RESIZEINPLACE_BLOCK( addr, oldSizeB, newSizeB, rzB ) \
    VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__RESIZEINPLACE_BLOCK, addr, oldSizeB, newSizeB, rzB, 0 )

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0. */
# define VALGRIND_FREELIKE_BLOCK( addr, rzB ) \
    VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__FREELIKE_BLOCK, addr, rzB, 0, 0, 0 )
4602 
/* Create a memory pool. */
# define VALGRIND_CREATE_MEMPOOL( pool, rzB, is_zeroed ) \
    VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__CREATE_MEMPOOL, pool, rzB, is_zeroed, 0, 0 )

/* Destroy a memory pool. */
# define VALGRIND_DESTROY_MEMPOOL( pool ) \
    VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__DESTROY_MEMPOOL, pool, 0, 0, 0, 0 )

/* Associate a piece of memory with a memory pool. */
# define VALGRIND_MEMPOOL_ALLOC( pool, addr, size ) \
    VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MEMPOOL_ALLOC, pool, addr, size, 0, 0 )

/* Disassociate a piece of memory from a memory pool. */
# define VALGRIND_MEMPOOL_FREE( pool, addr ) \
    VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MEMPOOL_FREE, pool, addr, 0, 0, 0 )

/* Disassociate any pieces outside a particular range. */
# define VALGRIND_MEMPOOL_TRIM( pool, addr, size ) \
    VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MEMPOOL_TRIM, pool, addr, size, 0, 0 )

/* Move the pool from poolA to poolB.  NOTE(review): the original comment
   here ("Resize and/or move a piece...") appears copy-pasted from
   VALGRIND_MEMPOOL_CHANGE below; the request takes two pool handles. */
# define VALGRIND_MOVE_MEMPOOL( poolA, poolB ) \
    VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MOVE_MEMPOOL, poolA, poolB, 0, 0, 0 )

/* Resize and/or move a piece associated with a memory pool. */
# define VALGRIND_MEMPOOL_CHANGE( pool, addrA, addrB, size ) \
    VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MEMPOOL_CHANGE, pool, addrA, addrB, size, 0 )

/* Return 1 if a mempool exists, else 0. */
# define VALGRIND_MEMPOOL_EXISTS( pool ) \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__MEMPOOL_EXISTS, pool, 0, 0, 0, 0 )
4634 
/* Mark a piece of memory as being a stack. Returns a stack id. */
# define VALGRIND_STACK_REGISTER( start, end ) \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__STACK_REGISTER, start, end, 0, 0, 0 )

/* Unmark the piece of memory associated with a stack id as being a
   stack. */
# define VALGRIND_STACK_DEREGISTER( id ) \
    VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__STACK_DEREGISTER, id, 0, 0, 0, 0 )

/* Change the start and end address of the stack id. */
# define VALGRIND_STACK_CHANGE( id, start, end ) \
    VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__STACK_CHANGE, id, start, end, 0, 0 )
4647 
/* Load PDB debug info for Wine PE image_map. */
# define VALGRIND_LOAD_PDB_DEBUGINFO( fd, ptr, total_size, delta ) \
    VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__LOAD_PDB_DEBUGINFO, fd, ptr, total_size, delta, 0 )

/* Map a code address to a source file name and line number. buf64
   must point to a 64-byte buffer in the caller's address space. The
   result will be dumped in there and is guaranteed to be zero
   terminated. If no info is found, the first byte is set to zero. */
# define VALGRIND_MAP_IP_TO_SRCLOC( addr, buf64 ) \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__MAP_IP_TO_SRCLOC, addr, buf64, 0, 0, 0 )
4658 
/* Disable error reporting for this thread. Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting. The first call of this macro disables
   reporting. Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting. Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled. */
# define VALGRIND_DISABLE_ERROR_REPORTING \
    VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__CHANGE_ERR_DISABLEMENT, 1, 0, 0, 0, 0 )

/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING. */
# define VALGRIND_ENABLE_ERROR_REPORTING \
    VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__CHANGE_ERR_DISABLEMENT, -1, 0, 0, 0, 0 )

/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise. */
# define VALGRIND_MONITOR_COMMAND( command ) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__GDB_MONITOR_COMMAND, command, 0, 0, 0, 0 )
4682 
/* Clean up the internal platform-selection macros so they do not leak
   into code that includes this header. */
# undef PLAT_x86_darwin
# undef PLAT_amd64_darwin
# undef PLAT_x86_win32
# undef PLAT_amd64_win64
# undef PLAT_x86_linux
# undef PLAT_amd64_linux
# undef PLAT_ppc32_linux
# undef PLAT_ppc64_linux
# undef PLAT_arm_linux
# undef PLAT_s390x_linux
# undef PLAT_mips32_linux
# undef PLAT_mips64_linux

#endif /* __VALGRIND_H */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(_zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)
#define __attribute__(x)
Definition: System.cpp:89
GAUDI_API std::string format(const char *,...)
MsgStream format utility "a la sprintf(...)".
Definition: MsgStream.cpp:109
Vg_ClientRequest