89 # define __VALGRIND_MAJOR__ 3
90 # define __VALGRIND_MINOR__ 8
108 # undef PLAT_x86_darwin
109 # undef PLAT_amd64_darwin
110 # undef PLAT_x86_win32
111 # undef PLAT_amd64_win64
112 # undef PLAT_x86_linux
113 # undef PLAT_amd64_linux
114 # undef PLAT_ppc32_linux
115 # undef PLAT_ppc64_linux
116 # undef PLAT_arm_linux
117 # undef PLAT_s390x_linux
118 # undef PLAT_mips32_linux
119 # undef PLAT_mips64_linux
121 # if defined( __APPLE__ ) && defined( __i386__ )
122 # define PLAT_x86_darwin 1
123 # elif defined( __APPLE__ ) && defined( __x86_64__ )
124 # define PLAT_amd64_darwin 1
125 # elif defined( __MINGW32__ ) || defined( __CYGWIN32__ ) || ( defined( _WIN32 ) && defined( _M_IX86 ) )
126 # define PLAT_x86_win32 1
127 # elif defined( __MINGW64__ ) || ( defined( _WIN64 ) && defined( _M_X64 ) )
128 # define PLAT_amd64_win64 1
129 # elif defined( __linux__ ) && defined( __i386__ )
130 # define PLAT_x86_linux 1
131 # elif defined( __linux__ ) && defined( __x86_64__ )
132 # define PLAT_amd64_linux 1
133 # elif defined( __linux__ ) && defined( __powerpc__ ) && !defined( __powerpc64__ )
134 # define PLAT_ppc32_linux 1
135 # elif defined( __linux__ ) && defined( __powerpc__ ) && defined( __powerpc64__ )
136 # define PLAT_ppc64_linux 1
137 # elif defined( __linux__ ) && defined( __arm__ )
138 # define PLAT_arm_linux 1
139 # elif defined( __linux__ ) && defined( __s390__ ) && defined( __s390x__ )
140 # define PLAT_s390x_linux 1
141 # elif defined( __linux__ ) && defined( __mips__ )
142 # if ( __mips == 64 )
143 # define PLAT_mips64_linux 1
144 # else
145 # define PLAT_mips32_linux 1
146 # endif
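/* Hedged note (not part of the original header): exactly one PLAT_* macro is
   defined by the dispatch above for the platform being compiled.  If nothing
   matched, the header falls back to forcing NVALGRIND on (the #if just below)
   so that every macro in this file compiles away to a harmless no-op.  Client
   code may key off the selected macro, e.g.:

       #if defined( PLAT_amd64_linux ) || defined( PLAT_amd64_darwin )
       // 64-bit x86 specific code
       #endif
*/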
150 # if !defined( NVALGRIND )
173 # define VALGRIND_DO_CLIENT_REQUEST( _zzq_rlval, _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, \
174 _zzq_arg4, _zzq_arg5 ) \
176 ( _zzq_rlval ) = VALGRIND_DO_CLIENT_REQUEST_EXPR( ( _zzq_default ), ( _zzq_request ), ( _zzq_arg1 ), \
177 ( _zzq_arg2 ), ( _zzq_arg3 ), ( _zzq_arg4 ), ( _zzq_arg5 ) ); \
180 # define VALGRIND_DO_CLIENT_REQUEST_STMT( _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5 ) \
182 (void)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, ( _zzq_request ), ( _zzq_arg1 ), ( _zzq_arg2 ), ( _zzq_arg3 ), \
183 ( _zzq_arg4 ), ( _zzq_arg5 ) ); \
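/* Hedged usage sketch (not in the original header): a client request passes
   six machine words to the Valgrind core and yields one word back, or the
   supplied default when the program is not running under Valgrind.  The
   request code below is the one used by the RUNNING_ON_VALGRIND convenience
   macro defined later in valgrind.h; its numeric value is assumed here.

       unsigned running = (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(
           0,        // default result when not under Valgrind
           0x1001,   // VG_USERREQ__RUNNING_ON_VALGRIND (assumed value)
           0, 0, 0, 0, 0 );
*/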
186 # if defined( NVALGRIND )
191 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
234 # if defined( PLAT_x86_linux ) || defined( PLAT_x86_darwin ) || ( defined( PLAT_x86_win32 ) && defined( __GNUC__ ) )
240 # define __SPECIAL_INSTRUCTION_PREAMBLE \
241 "roll $3, %%edi ; roll $13, %%edi\n\t" \
242 "roll $29, %%edi ; roll $19, %%edi\n\t"
244 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
247 volatile unsigned int _zzq_args[6]; \
248 volatile unsigned int _zzq_result; \
249 _zzq_args[0] = (unsigned int)( _zzq_request ); \
250 _zzq_args[1] = (unsigned int)( _zzq_arg1 ); \
251 _zzq_args[2] = (unsigned int)( _zzq_arg2 ); \
252 _zzq_args[3] = (unsigned int)( _zzq_arg3 ); \
253 _zzq_args[4] = (unsigned int)( _zzq_arg4 ); \
254 _zzq_args[5] = (unsigned int)( _zzq_arg5 ); \
255 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
256 "xchgl %%ebx,%%ebx" \
257 : "=d"( _zzq_result ) \
258 : "a"( &_zzq_args[0] ), "0"( _zzq_default ) \
259 : "cc", "memory" ); \
263 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
265 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
266 volatile unsigned int __addr; \
267 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
268 "xchgl %%ecx,%%ecx" \
271 : "cc", "memory" ); \
272 _zzq_orig->nraddr = __addr; \
275 # define VALGRIND_CALL_NOREDIR_EAX \
276 __SPECIAL_INSTRUCTION_PREAMBLE \
278 "xchgl %%edx,%%edx\n\t"
280 # define VALGRIND_VEX_INJECT_IR() \
282 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "xchgl %%edi,%%edi\n\t" : : : "cc", "memory" ); \
289 # if defined( PLAT_x86_win32 ) && !defined( __GNUC__ )
295 # if defined( _MSC_VER )
297 # define __SPECIAL_INSTRUCTION_PREAMBLE __asm rol edi, 3 __asm rol edi, 13 __asm rol edi, 29 __asm rol edi, 19
299 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, \
300 _zzq_arg4, _zzq_arg5 ) \
301 valgrind_do_client_request_expr( ( uintptr_t )( _zzq_default ), ( uintptr_t )( _zzq_request ), \
302 ( uintptr_t )( _zzq_arg1 ), ( uintptr_t )( _zzq_arg2 ), \
303 ( uintptr_t )( _zzq_arg3 ), ( uintptr_t )( _zzq_arg4 ), \
304 ( uintptr_t )( _zzq_arg5 ) )
306 static __inline uintptr_t valgrind_do_client_request_expr( uintptr_t _zzq_default, uintptr_t _zzq_request,
307 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
308 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
309 uintptr_t _zzq_arg5 ) {
310 volatile uintptr_t _zzq_args[6];
311 volatile unsigned int _zzq_result;
312 _zzq_args[0] = ( uintptr_t )( _zzq_request );
313 _zzq_args[1] = ( uintptr_t )( _zzq_arg1 );
314 _zzq_args[2] = ( uintptr_t )( _zzq_arg2 );
315 _zzq_args[3] = ( uintptr_t )( _zzq_arg3 );
316 _zzq_args[4] = ( uintptr_t )( _zzq_arg4 );
317 _zzq_args[5] = ( uintptr_t )( _zzq_arg5 );
318 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
319 __SPECIAL_INSTRUCTION_PREAMBLE
322 __asm mov _zzq_result, edx
327 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
329 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
330 volatile unsigned int __addr; \
331 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
333 __asm mov __addr, eax} \
334 _zzq_orig->nraddr = __addr; \
337 # define VALGRIND_CALL_NOREDIR_EAX ERROR
339 # define VALGRIND_VEX_INJECT_IR() \
341 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
342 __asm xchg edi,edi} \
346 # error Unsupported compiler.
353 # if defined( PLAT_amd64_linux ) || defined( PLAT_amd64_darwin )
356 unsigned long long int nraddr;
359 # define __SPECIAL_INSTRUCTION_PREAMBLE \
360 "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
361 "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
363 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
366 volatile unsigned long long int _zzq_args[6]; \
367 volatile unsigned long long int _zzq_result; \
368 _zzq_args[0] = (unsigned long long int)( _zzq_request ); \
369 _zzq_args[1] = (unsigned long long int)( _zzq_arg1 ); \
370 _zzq_args[2] = (unsigned long long int)( _zzq_arg2 ); \
371 _zzq_args[3] = (unsigned long long int)( _zzq_arg3 ); \
372 _zzq_args[4] = (unsigned long long int)( _zzq_arg4 ); \
373 _zzq_args[5] = (unsigned long long int)( _zzq_arg5 ); \
374 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
375 "xchgq %%rbx,%%rbx" \
376 : "=d"( _zzq_result ) \
377 : "a"( &_zzq_args[0] ), "0"( _zzq_default ) \
378 : "cc", "memory" ); \
382 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
384 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
385 volatile unsigned long long int __addr; \
386 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
387 "xchgq %%rcx,%%rcx" \
390 : "cc", "memory" ); \
391 _zzq_orig->nraddr = __addr; \
394 # define VALGRIND_CALL_NOREDIR_RAX \
395 __SPECIAL_INSTRUCTION_PREAMBLE \
397 "xchgq %%rdx,%%rdx\n\t"
399 # define VALGRIND_VEX_INJECT_IR() \
401 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "xchgq %%rdi,%%rdi\n\t" : : : "cc", "memory" ); \
408 # if defined( PLAT_ppc32_linux )
414 # define __SPECIAL_INSTRUCTION_PREAMBLE \
415 "rlwinm 0,0,3,0,0 ; rlwinm 0,0,13,0,0\n\t" \
416 "rlwinm 0,0,29,0,0 ; rlwinm 0,0,19,0,0\n\t"
418 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
422 unsigned int _zzq_args[6]; \
423 unsigned int _zzq_result; \
424 unsigned int* _zzq_ptr; \
425 _zzq_args[0] = (unsigned int)( _zzq_request ); \
426 _zzq_args[1] = (unsigned int)( _zzq_arg1 ); \
427 _zzq_args[2] = (unsigned int)( _zzq_arg2 ); \
428 _zzq_args[3] = (unsigned int)( _zzq_arg3 ); \
429 _zzq_args[4] = (unsigned int)( _zzq_arg4 ); \
430 _zzq_args[5] = (unsigned int)( _zzq_arg5 ); \
431 _zzq_ptr = _zzq_args; \
432 __asm__ volatile( "mr 3,%1\n\t" \
434 __SPECIAL_INSTRUCTION_PREAMBLE \
437 : "=b"( _zzq_result ) \
438 : "b"( _zzq_default ), "b"( _zzq_ptr ) \
439 : "cc", "memory", "r3", "r4" ); \
443 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
445 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
446 unsigned int __addr; \
447 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
452 : "cc", "memory", "r3" ); \
453 _zzq_orig->nraddr = __addr; \
456 # define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
457 __SPECIAL_INSTRUCTION_PREAMBLE \
461 # define VALGRIND_VEX_INJECT_IR() \
462 do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "or 5,5,5\n\t" ); } while ( 0 )
468 # if defined( PLAT_ppc64_linux )
471 unsigned long long int nraddr;
472 unsigned long long int r2;
475 # define __SPECIAL_INSTRUCTION_PREAMBLE \
476 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
477 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
479 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
483 unsigned long long int _zzq_args[6]; \
484 unsigned long long int _zzq_result; \
485 unsigned long long int* _zzq_ptr; \
486 _zzq_args[0] = (unsigned long long int)( _zzq_request ); \
487 _zzq_args[1] = (unsigned long long int)( _zzq_arg1 ); \
488 _zzq_args[2] = (unsigned long long int)( _zzq_arg2 ); \
489 _zzq_args[3] = (unsigned long long int)( _zzq_arg3 ); \
490 _zzq_args[4] = (unsigned long long int)( _zzq_arg4 ); \
491 _zzq_args[5] = (unsigned long long int)( _zzq_arg5 ); \
492 _zzq_ptr = _zzq_args; \
493 __asm__ volatile( "mr 3,%1\n\t" \
495 __SPECIAL_INSTRUCTION_PREAMBLE \
498 : "=b"( _zzq_result ) \
499 : "b"( _zzq_default ), "b"( _zzq_ptr ) \
500 : "cc", "memory", "r3", "r4" ); \
504 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
506 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
507 unsigned long long int __addr; \
508 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
513 : "cc", "memory", "r3" ); \
514 _zzq_orig->nraddr = __addr; \
515 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
520 : "cc", "memory", "r3" ); \
521 _zzq_orig->r2 = __addr; \
524 # define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
525 __SPECIAL_INSTRUCTION_PREAMBLE \
529 # define VALGRIND_VEX_INJECT_IR() \
530 do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "or 5,5,5\n\t" ); } while ( 0 )
536 # if defined( PLAT_arm_linux )
542 # define __SPECIAL_INSTRUCTION_PREAMBLE \
543 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
544 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
546 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
550 volatile unsigned int _zzq_args[6]; \
551 volatile unsigned int _zzq_result; \
552 _zzq_args[0] = (unsigned int)( _zzq_request ); \
553 _zzq_args[1] = (unsigned int)( _zzq_arg1 ); \
554 _zzq_args[2] = (unsigned int)( _zzq_arg2 ); \
555 _zzq_args[3] = (unsigned int)( _zzq_arg3 ); \
556 _zzq_args[4] = (unsigned int)( _zzq_arg4 ); \
557 _zzq_args[5] = (unsigned int)( _zzq_arg5 ); \
558 __asm__ volatile( "mov r3, %1\n\t" \
560 __SPECIAL_INSTRUCTION_PREAMBLE \
561 "orr r10, r10, r10\n\t" \
563 : "=r"( _zzq_result ) \
564 : "r"( _zzq_default ), "r"( &_zzq_args[0] ) \
565 : "cc", "memory", "r3", "r4" ); \
569 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
571 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
572 unsigned int __addr; \
573 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
574 "orr r11, r11, r11\n\t" \
578 : "cc", "memory", "r3" ); \
579 _zzq_orig->nraddr = __addr; \
582 # define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
583 __SPECIAL_INSTRUCTION_PREAMBLE \
585 "orr r12, r12, r12\n\t"
587 # define VALGRIND_VEX_INJECT_IR() \
588 do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "orr r9, r9, r9\n\t" : : : "cc", "memory" ); } while ( 0 )
594 # if defined( PLAT_s390x_linux )
597 unsigned long long int nraddr;
604 # define __SPECIAL_INSTRUCTION_PREAMBLE \
610 # define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
611 # define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
612 # define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
613 # define __VEX_INJECT_IR_CODE "lr 5,5\n\t"
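/* Descriptive note: each "lr n,n" above copies a register to itself, i.e. a
   no-op; appended to the preamble they form the distinct marker patterns for
   the individual request kinds on s390x. */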
615 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
618 volatile unsigned long long int _zzq_args[6]; \
619 volatile unsigned long long int _zzq_result; \
620 _zzq_args[0] = (unsigned long long int)( _zzq_request ); \
621 _zzq_args[1] = (unsigned long long int)( _zzq_arg1 ); \
622 _zzq_args[2] = (unsigned long long int)( _zzq_arg2 ); \
623 _zzq_args[3] = (unsigned long long int)( _zzq_arg3 ); \
624 _zzq_args[4] = (unsigned long long int)( _zzq_arg4 ); \
625 _zzq_args[5] = (unsigned long long int)( _zzq_arg5 ); \
628 "lgr 3,%2\n\t" __SPECIAL_INSTRUCTION_PREAMBLE __CLIENT_REQUEST_CODE \
630 : "=d"( _zzq_result ) \
631 : "a"( &_zzq_args[0] ), "0"( _zzq_default ) \
632 : "cc", "2", "3", "memory" ); \
636 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
638 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
639 volatile unsigned long long int __addr; \
640 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE __GET_NR_CONTEXT_CODE "lgr %0, 3\n\t" \
643 : "cc", "3", "memory" ); \
644 _zzq_orig->nraddr = __addr; \
647 # define VALGRIND_CALL_NOREDIR_R1 \
648 __SPECIAL_INSTRUCTION_PREAMBLE \
651 # define VALGRIND_VEX_INJECT_IR() \
652 do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE __VEX_INJECT_IR_CODE ); } while ( 0 )
658 # if defined( PLAT_mips32_linux )
668 # define __SPECIAL_INSTRUCTION_PREAMBLE \
669 "srl $0, $0, 13\n\t" \
670 "srl $0, $0, 29\n\t" \
671 "srl $0, $0, 3\n\t" \
674 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
677 volatile unsigned int _zzq_args[6]; \
678 volatile unsigned int _zzq_result; \
679 _zzq_args[0] = (unsigned int)( _zzq_request ); \
680 _zzq_args[1] = (unsigned int)( _zzq_arg1 ); \
681 _zzq_args[2] = (unsigned int)( _zzq_arg2 ); \
682 _zzq_args[3] = (unsigned int)( _zzq_arg3 ); \
683 _zzq_args[4] = (unsigned int)( _zzq_arg4 ); \
684 _zzq_args[5] = (unsigned int)( _zzq_arg5 ); \
685 __asm__ volatile( "move $11, %1\n\t" \
687 __SPECIAL_INSTRUCTION_PREAMBLE \
688 "or $13, $13, $13\n\t" \
690 : "=r"( _zzq_result ) \
691 : "r"( _zzq_default ), "r"( &_zzq_args[0] ) \
696 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
698 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
699 volatile unsigned int __addr; \
700 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
701 "or $14, $14, $14\n\t" \
706 _zzq_orig->nraddr = __addr; \
709 # define VALGRIND_CALL_NOREDIR_T9 \
710 __SPECIAL_INSTRUCTION_PREAMBLE \
712 "or $15, $15, $15\n\t"
714 # define VALGRIND_VEX_INJECT_IR() \
715 do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "or $11, $11, $11\n\t" ); } while ( 0 )
721 # if defined( PLAT_mips64_linux )
724 unsigned long long nraddr;
731 # define __SPECIAL_INSTRUCTION_PREAMBLE \
732 "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
733 "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
735 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
738 volatile unsigned long long int _zzq_args[6]; \
739 volatile unsigned long long int _zzq_result; \
740 _zzq_args[0] = (unsigned long long int)( _zzq_request ); \
741 _zzq_args[1] = (unsigned long long int)( _zzq_arg1 ); \
742 _zzq_args[2] = (unsigned long long int)( _zzq_arg2 ); \
743 _zzq_args[3] = (unsigned long long int)( _zzq_arg3 ); \
744 _zzq_args[4] = (unsigned long long int)( _zzq_arg4 ); \
745 _zzq_args[5] = (unsigned long long int)( _zzq_arg5 ); \
746 __asm__ volatile( "move $11, %1\n\t" \
748 __SPECIAL_INSTRUCTION_PREAMBLE \
749 "or $13, $13, $13\n\t" \
751 : "=r"( _zzq_result ) \
752 : "r"( _zzq_default ), "r"( &_zzq_args[0] ) \
757 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
759 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
760 volatile unsigned long long int __addr; \
761 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
762 "or $14, $14, $14\n\t" \
767 _zzq_orig->nraddr = __addr; \
770 # define VALGRIND_CALL_NOREDIR_T9 \
771 __SPECIAL_INSTRUCTION_PREAMBLE \
773 "or $15, $15, $15\n\t"
775 # define VALGRIND_VEX_INJECT_IR() \
776 do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "or $11, $11, $11\n\t" ); } while ( 0 )
815 # define VG_CONCAT4( _aa, _bb, _cc, _dd ) _aa##_bb##_cc##_dd
817 # define I_WRAP_SONAME_FNNAME_ZU( soname, fnname ) VG_CONCAT4( _vgw00000ZU_, soname, _, fnname )
819 # define I_WRAP_SONAME_FNNAME_ZZ( soname, fnname ) VG_CONCAT4( _vgw00000ZZ_, soname, _, fnname )
825 # define VALGRIND_GET_ORIG_FN( _lval ) VALGRIND_GET_NR_CONTEXT( _lval )
833 # define I_REPLACE_SONAME_FNNAME_ZU( soname, fnname ) VG_CONCAT4( _vgr00000ZU_, soname, _, fnname )
835 # define I_REPLACE_SONAME_FNNAME_ZZ( soname, fnname ) VG_CONCAT4( _vgr00000ZZ_, soname, _, fnname )
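/* Hedged usage sketch, adapted from the Valgrind manual's function-wrapping
   example: to wrap a function foo exported by the main executable (soname
   "NONE"), name the wrapper with I_WRAP_SONAME_FNNAME_ZU, fetch the original
   entry point with VALGRIND_GET_ORIG_FN, and call through with the CALL_FN_*
   macro matching the arity:

       #include "valgrind.h"

       int I_WRAP_SONAME_FNNAME_ZU( NONE, foo )( int x, int y ) {
           int    result;
           OrigFn fn;
           VALGRIND_GET_ORIG_FN( fn );
           CALL_FN_W_WW( result, fn, x, y );   // calls the real foo( x, y )
           return result;
       }
*/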
840 # define CALL_FN_v_v( fnptr ) \
842 volatile unsigned long _junk; \
843 CALL_FN_W_v( _junk, fnptr ); \
846 # define CALL_FN_v_W( fnptr, arg1 ) \
848 volatile unsigned long _junk; \
849 CALL_FN_W_W( _junk, fnptr, arg1 ); \
852 # define CALL_FN_v_WW( fnptr, arg1, arg2 ) \
854 volatile unsigned long _junk; \
855 CALL_FN_W_WW( _junk, fnptr, arg1, arg2 ); \
858 # define CALL_FN_v_WWW( fnptr, arg1, arg2, arg3 ) \
860 volatile unsigned long _junk; \
861 CALL_FN_W_WWW( _junk, fnptr, arg1, arg2, arg3 ); \
864 # define CALL_FN_v_WWWW( fnptr, arg1, arg2, arg3, arg4 ) \
866 volatile unsigned long _junk; \
867 CALL_FN_W_WWWW( _junk, fnptr, arg1, arg2, arg3, arg4 ); \
870 # define CALL_FN_v_5W( fnptr, arg1, arg2, arg3, arg4, arg5 ) \
872 volatile unsigned long _junk; \
873 CALL_FN_W_5W( _junk, fnptr, arg1, arg2, arg3, arg4, arg5 ); \
876 # define CALL_FN_v_6W( fnptr, arg1, arg2, arg3, arg4, arg5, arg6 ) \
878 volatile unsigned long _junk; \
879 CALL_FN_W_6W( _junk, fnptr, arg1, arg2, arg3, arg4, arg5, arg6 ); \
882 # define CALL_FN_v_7W( fnptr, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
884 volatile unsigned long _junk; \
885 CALL_FN_W_7W( _junk, fnptr, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ); \
890 # if defined( PLAT_x86_linux ) || defined( PLAT_x86_darwin )
894 # define __CALLER_SAVED_REGS "ecx", "edx"
901 # define VALGRIND_ALIGN_STACK \
902 "movl %%esp,%%edi\n\t" \
903 "andl $0xfffffff0,%%esp\n\t"
904 # define VALGRIND_RESTORE_STACK "movl %%edi,%%esp\n\t"
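/* Descriptive note: VALGRIND_ALIGN_STACK saves the caller's %esp in %edi and
   rounds %esp down to a 16-byte boundary; VALGRIND_RESTORE_STACK copies it
   back, which is why "edi" appears as a clobber in every CALL_FN_* below.
   Each CALL_FN_* also pre-adjusts %esp (e.g. "subl $12, %%esp" before a
   single push) so that the pushed arguments keep the stack 16-byte aligned
   at the point of the call. */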
909 # define CALL_FN_W_v( lval, orig ) \
911 volatile OrigFn _orig = ( orig ); \
912 volatile unsigned long _argvec[1]; \
913 volatile unsigned long _res; \
914 _argvec[0] = (unsigned long)_orig.nraddr; \
915 __asm__ volatile( VALGRIND_ALIGN_STACK "movl (%%eax), %%eax\n\t" \
916 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
918 : "a"( &_argvec[0] ) \
919 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
920 lval = (__typeof__( lval ))_res; \
923 # define CALL_FN_W_W( lval, orig, arg1 ) \
925 volatile OrigFn _orig = ( orig ); \
926 volatile unsigned long _argvec[2]; \
927 volatile unsigned long _res; \
928 _argvec[0] = (unsigned long)_orig.nraddr; \
929 _argvec[1] = (unsigned long)( arg1 ); \
930 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $12, %%esp\n\t" \
931 "pushl 4(%%eax)\n\t" \
932 "movl (%%eax), %%eax\n\t" \
933 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
935 : "a"( &_argvec[0] ) \
936 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
937 lval = (__typeof__( lval ))_res; \
940 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
942 volatile OrigFn _orig = ( orig ); \
943 volatile unsigned long _argvec[3]; \
944 volatile unsigned long _res; \
945 _argvec[0] = (unsigned long)_orig.nraddr; \
946 _argvec[1] = (unsigned long)( arg1 ); \
947 _argvec[2] = (unsigned long)( arg2 ); \
948 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $8, %%esp\n\t" \
949 "pushl 8(%%eax)\n\t" \
950 "pushl 4(%%eax)\n\t" \
951 "movl (%%eax), %%eax\n\t" \
952 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
954 : "a"( &_argvec[0] ) \
955 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
956 lval = (__typeof__( lval ))_res; \
959 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
961 volatile OrigFn _orig = ( orig ); \
962 volatile unsigned long _argvec[4]; \
963 volatile unsigned long _res; \
964 _argvec[0] = (unsigned long)_orig.nraddr; \
965 _argvec[1] = (unsigned long)( arg1 ); \
966 _argvec[2] = (unsigned long)( arg2 ); \
967 _argvec[3] = (unsigned long)( arg3 ); \
968 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $4, %%esp\n\t" \
969 "pushl 12(%%eax)\n\t" \
970 "pushl 8(%%eax)\n\t" \
971 "pushl 4(%%eax)\n\t" \
972 "movl (%%eax), %%eax\n\t" \
973 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
975 : "a"( &_argvec[0] ) \
976 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
977 lval = (__typeof__( lval ))_res; \
980 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
982 volatile OrigFn _orig = ( orig ); \
983 volatile unsigned long _argvec[5]; \
984 volatile unsigned long _res; \
985 _argvec[0] = (unsigned long)_orig.nraddr; \
986 _argvec[1] = (unsigned long)( arg1 ); \
987 _argvec[2] = (unsigned long)( arg2 ); \
988 _argvec[3] = (unsigned long)( arg3 ); \
989 _argvec[4] = (unsigned long)( arg4 ); \
990 __asm__ volatile( VALGRIND_ALIGN_STACK "pushl 16(%%eax)\n\t" \
991 "pushl 12(%%eax)\n\t" \
992 "pushl 8(%%eax)\n\t" \
993 "pushl 4(%%eax)\n\t" \
994 "movl (%%eax), %%eax\n\t" \
995 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
997 : "a"( &_argvec[0] ) \
998 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
999 lval = (__typeof__( lval ))_res; \
1002 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
1004 volatile OrigFn _orig = ( orig ); \
1005 volatile unsigned long _argvec[6]; \
1006 volatile unsigned long _res; \
1007 _argvec[0] = (unsigned long)_orig.nraddr; \
1008 _argvec[1] = (unsigned long)( arg1 ); \
1009 _argvec[2] = (unsigned long)( arg2 ); \
1010 _argvec[3] = (unsigned long)( arg3 ); \
1011 _argvec[4] = (unsigned long)( arg4 ); \
1012 _argvec[5] = (unsigned long)( arg5 ); \
1013 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $12, %%esp\n\t" \
1014 "pushl 20(%%eax)\n\t" \
1015 "pushl 16(%%eax)\n\t" \
1016 "pushl 12(%%eax)\n\t" \
1017 "pushl 8(%%eax)\n\t" \
1018 "pushl 4(%%eax)\n\t" \
1019 "movl (%%eax), %%eax\n\t" \
1020 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1022 : "a"( &_argvec[0] ) \
1023 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
1024 lval = (__typeof__( lval ))_res; \
1027 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
1029 volatile OrigFn _orig = ( orig ); \
1030 volatile unsigned long _argvec[7]; \
1031 volatile unsigned long _res; \
1032 _argvec[0] = (unsigned long)_orig.nraddr; \
1033 _argvec[1] = (unsigned long)( arg1 ); \
1034 _argvec[2] = (unsigned long)( arg2 ); \
1035 _argvec[3] = (unsigned long)( arg3 ); \
1036 _argvec[4] = (unsigned long)( arg4 ); \
1037 _argvec[5] = (unsigned long)( arg5 ); \
1038 _argvec[6] = (unsigned long)( arg6 ); \
1039 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $8, %%esp\n\t" \
1040 "pushl 24(%%eax)\n\t" \
1041 "pushl 20(%%eax)\n\t" \
1042 "pushl 16(%%eax)\n\t" \
1043 "pushl 12(%%eax)\n\t" \
1044 "pushl 8(%%eax)\n\t" \
1045 "pushl 4(%%eax)\n\t" \
1046 "movl (%%eax), %%eax\n\t" \
1047 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1049 : "a"( &_argvec[0] ) \
1050 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
1051 lval = (__typeof__( lval ))_res; \
1054 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
1056 volatile OrigFn _orig = ( orig ); \
1057 volatile unsigned long _argvec[8]; \
1058 volatile unsigned long _res; \
1059 _argvec[0] = (unsigned long)_orig.nraddr; \
1060 _argvec[1] = (unsigned long)( arg1 ); \
1061 _argvec[2] = (unsigned long)( arg2 ); \
1062 _argvec[3] = (unsigned long)( arg3 ); \
1063 _argvec[4] = (unsigned long)( arg4 ); \
1064 _argvec[5] = (unsigned long)( arg5 ); \
1065 _argvec[6] = (unsigned long)( arg6 ); \
1066 _argvec[7] = (unsigned long)( arg7 ); \
1067 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $4, %%esp\n\t" \
1068 "pushl 28(%%eax)\n\t" \
1069 "pushl 24(%%eax)\n\t" \
1070 "pushl 20(%%eax)\n\t" \
1071 "pushl 16(%%eax)\n\t" \
1072 "pushl 12(%%eax)\n\t" \
1073 "pushl 8(%%eax)\n\t" \
1074 "pushl 4(%%eax)\n\t" \
1075 "movl (%%eax), %%eax\n\t" \
1076 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1078 : "a"( &_argvec[0] ) \
1079 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
1080 lval = (__typeof__( lval ))_res; \
1083 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
1085 volatile OrigFn _orig = ( orig ); \
1086 volatile unsigned long _argvec[9]; \
1087 volatile unsigned long _res; \
1088 _argvec[0] = (unsigned long)_orig.nraddr; \
1089 _argvec[1] = (unsigned long)( arg1 ); \
1090 _argvec[2] = (unsigned long)( arg2 ); \
1091 _argvec[3] = (unsigned long)( arg3 ); \
1092 _argvec[4] = (unsigned long)( arg4 ); \
1093 _argvec[5] = (unsigned long)( arg5 ); \
1094 _argvec[6] = (unsigned long)( arg6 ); \
1095 _argvec[7] = (unsigned long)( arg7 ); \
1096 _argvec[8] = (unsigned long)( arg8 ); \
1097 __asm__ volatile( VALGRIND_ALIGN_STACK "pushl 32(%%eax)\n\t" \
1098 "pushl 28(%%eax)\n\t" \
1099 "pushl 24(%%eax)\n\t" \
1100 "pushl 20(%%eax)\n\t" \
1101 "pushl 16(%%eax)\n\t" \
1102 "pushl 12(%%eax)\n\t" \
1103 "pushl 8(%%eax)\n\t" \
1104 "pushl 4(%%eax)\n\t" \
1105 "movl (%%eax), %%eax\n\t" \
1106 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1108 : "a"( &_argvec[0] ) \
1109 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
1110 lval = (__typeof__( lval ))_res; \
1113 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
1115 volatile OrigFn _orig = ( orig ); \
1116 volatile unsigned long _argvec[10]; \
1117 volatile unsigned long _res; \
1118 _argvec[0] = (unsigned long)_orig.nraddr; \
1119 _argvec[1] = (unsigned long)( arg1 ); \
1120 _argvec[2] = (unsigned long)( arg2 ); \
1121 _argvec[3] = (unsigned long)( arg3 ); \
1122 _argvec[4] = (unsigned long)( arg4 ); \
1123 _argvec[5] = (unsigned long)( arg5 ); \
1124 _argvec[6] = (unsigned long)( arg6 ); \
1125 _argvec[7] = (unsigned long)( arg7 ); \
1126 _argvec[8] = (unsigned long)( arg8 ); \
1127 _argvec[9] = (unsigned long)( arg9 ); \
1128 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $12, %%esp\n\t" \
1129 "pushl 36(%%eax)\n\t" \
1130 "pushl 32(%%eax)\n\t" \
1131 "pushl 28(%%eax)\n\t" \
1132 "pushl 24(%%eax)\n\t" \
1133 "pushl 20(%%eax)\n\t" \
1134 "pushl 16(%%eax)\n\t" \
1135 "pushl 12(%%eax)\n\t" \
1136 "pushl 8(%%eax)\n\t" \
1137 "pushl 4(%%eax)\n\t" \
1138 "movl (%%eax), %%eax\n\t" \
1139 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1141 : "a"( &_argvec[0] ) \
1142 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
1143 lval = (__typeof__( lval ))_res; \
1146 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
1148 volatile OrigFn _orig = ( orig ); \
1149 volatile unsigned long _argvec[11]; \
1150 volatile unsigned long _res; \
1151 _argvec[0] = (unsigned long)_orig.nraddr; \
1152 _argvec[1] = (unsigned long)( arg1 ); \
1153 _argvec[2] = (unsigned long)( arg2 ); \
1154 _argvec[3] = (unsigned long)( arg3 ); \
1155 _argvec[4] = (unsigned long)( arg4 ); \
1156 _argvec[5] = (unsigned long)( arg5 ); \
1157 _argvec[6] = (unsigned long)( arg6 ); \
1158 _argvec[7] = (unsigned long)( arg7 ); \
1159 _argvec[8] = (unsigned long)( arg8 ); \
1160 _argvec[9] = (unsigned long)( arg9 ); \
1161 _argvec[10] = (unsigned long)( arg10 ); \
1162 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $8, %%esp\n\t" \
1163 "pushl 40(%%eax)\n\t" \
1164 "pushl 36(%%eax)\n\t" \
1165 "pushl 32(%%eax)\n\t" \
1166 "pushl 28(%%eax)\n\t" \
1167 "pushl 24(%%eax)\n\t" \
1168 "pushl 20(%%eax)\n\t" \
1169 "pushl 16(%%eax)\n\t" \
1170 "pushl 12(%%eax)\n\t" \
1171 "pushl 8(%%eax)\n\t" \
1172 "pushl 4(%%eax)\n\t" \
1173 "movl (%%eax), %%eax\n\t" \
1174 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1176 : "a"( &_argvec[0] ) \
1177 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
1178 lval = (__typeof__( lval ))_res; \
1181 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
1183 volatile OrigFn _orig = ( orig ); \
1184 volatile unsigned long _argvec[12]; \
1185 volatile unsigned long _res; \
1186 _argvec[0] = (unsigned long)_orig.nraddr; \
1187 _argvec[1] = (unsigned long)( arg1 ); \
1188 _argvec[2] = (unsigned long)( arg2 ); \
1189 _argvec[3] = (unsigned long)( arg3 ); \
1190 _argvec[4] = (unsigned long)( arg4 ); \
1191 _argvec[5] = (unsigned long)( arg5 ); \
1192 _argvec[6] = (unsigned long)( arg6 ); \
1193 _argvec[7] = (unsigned long)( arg7 ); \
1194 _argvec[8] = (unsigned long)( arg8 ); \
1195 _argvec[9] = (unsigned long)( arg9 ); \
1196 _argvec[10] = (unsigned long)( arg10 ); \
1197 _argvec[11] = (unsigned long)( arg11 ); \
1198 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $4, %%esp\n\t" \
1199 "pushl 44(%%eax)\n\t" \
1200 "pushl 40(%%eax)\n\t" \
1201 "pushl 36(%%eax)\n\t" \
1202 "pushl 32(%%eax)\n\t" \
1203 "pushl 28(%%eax)\n\t" \
1204 "pushl 24(%%eax)\n\t" \
1205 "pushl 20(%%eax)\n\t" \
1206 "pushl 16(%%eax)\n\t" \
1207 "pushl 12(%%eax)\n\t" \
1208 "pushl 8(%%eax)\n\t" \
1209 "pushl 4(%%eax)\n\t" \
1210 "movl (%%eax), %%eax\n\t" \
1211 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1213 : "a"( &_argvec[0] ) \
1214 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
1215 lval = (__typeof__( lval ))_res; \
1218 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
1220 volatile OrigFn _orig = ( orig ); \
1221 volatile unsigned long _argvec[13]; \
1222 volatile unsigned long _res; \
1223 _argvec[0] = (unsigned long)_orig.nraddr; \
1224 _argvec[1] = (unsigned long)( arg1 ); \
1225 _argvec[2] = (unsigned long)( arg2 ); \
1226 _argvec[3] = (unsigned long)( arg3 ); \
1227 _argvec[4] = (unsigned long)( arg4 ); \
1228 _argvec[5] = (unsigned long)( arg5 ); \
1229 _argvec[6] = (unsigned long)( arg6 ); \
1230 _argvec[7] = (unsigned long)( arg7 ); \
1231 _argvec[8] = (unsigned long)( arg8 ); \
1232 _argvec[9] = (unsigned long)( arg9 ); \
1233 _argvec[10] = (unsigned long)( arg10 ); \
1234 _argvec[11] = (unsigned long)( arg11 ); \
1235 _argvec[12] = (unsigned long)( arg12 ); \
1236 __asm__ volatile( VALGRIND_ALIGN_STACK "pushl 48(%%eax)\n\t" \
1237 "pushl 44(%%eax)\n\t" \
1238 "pushl 40(%%eax)\n\t" \
1239 "pushl 36(%%eax)\n\t" \
1240 "pushl 32(%%eax)\n\t" \
1241 "pushl 28(%%eax)\n\t" \
1242 "pushl 24(%%eax)\n\t" \
1243 "pushl 20(%%eax)\n\t" \
1244 "pushl 16(%%eax)\n\t" \
1245 "pushl 12(%%eax)\n\t" \
1246 "pushl 8(%%eax)\n\t" \
1247 "pushl 4(%%eax)\n\t" \
1248 "movl (%%eax), %%eax\n\t" \
1249 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1251 : "a"( &_argvec[0] ) \
1252 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
1253 lval = (__typeof__( lval ))_res; \
1260 # if defined( PLAT_amd64_linux ) || defined( PLAT_amd64_darwin )
1265 # define __CALLER_SAVED_REGS "rcx", "rdx", "rsi", "rdi", "r8", "r9", "r10", "r11"
1321 # if defined( __GNUC__ ) && defined( __GCC_HAVE_DWARF2_CFI_ASM )
1322 # define __FRAME_POINTER , "r"( __builtin_dwarf_cfa() )
1323 # define VALGRIND_CFI_PROLOGUE \
1324 "movq %%rbp, %%r15\n\t" \
1325 "movq %2, %%rbp\n\t" \
1326 ".cfi_remember_state\n\t" \
1327 ".cfi_def_cfa rbp, 0\n\t"
1328 # define VALGRIND_CFI_EPILOGUE \
1329 "movq %%r15, %%rbp\n\t" \
1330 ".cfi_restore_state\n\t"
1332 # define __FRAME_POINTER
1333 # define VALGRIND_CFI_PROLOGUE
1334 # define VALGRIND_CFI_EPILOGUE
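/* Descriptive note: when GCC emits DWARF CFI for inline asm
   (__GCC_HAVE_DWARF2_CFI_ASM), the prologue above stashes %rbp in %r15 and
   redefines the CFA relative to the frame address supplied through
   __FRAME_POINTER, so stack unwinding through the wrapper call keeps
   working; the epilogue restores the previous state.  Without CFI support
   the three macros are simply empty. */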
1342 # define VALGRIND_ALIGN_STACK \
1343 "movq %%rsp,%%r14\n\t" \
1344 "andq $0xfffffffffffffff0,%%rsp\n\t"
1345 # define VALGRIND_RESTORE_STACK "movq %%r14,%%rsp\n\t"
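/* Descriptive note: VALGRIND_ALIGN_STACK saves %rsp in %r14 and aligns it to
   16 bytes; VALGRIND_RESTORE_STACK copies it back, hence "r14" (and "r15"
   for the CFI prologue) in the clobber lists below.  The "subq $128,%%rsp"
   (or $136 when an odd number of quadwords is pushed) in each CALL_FN_* body
   steps over the 128-byte System V red zone below %rsp before any
   stack-passed arguments are pushed, while keeping 16-byte alignment at the
   call. */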
1371 # define CALL_FN_W_v( lval, orig ) \
1373 volatile OrigFn _orig = ( orig ); \
1374 volatile unsigned long _argvec[1]; \
1375 volatile unsigned long _res; \
1376 _argvec[0] = (unsigned long)_orig.nraddr; \
1377 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1378 "movq (%%rax), %%rax\n\t" \
1379 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1381 : "a"(&_argvec[0])__FRAME_POINTER \
1382 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1383 lval = (__typeof__( lval ))_res; \
1386 # define CALL_FN_W_W( lval, orig, arg1 ) \
1388 volatile OrigFn _orig = ( orig ); \
1389 volatile unsigned long _argvec[2]; \
1390 volatile unsigned long _res; \
1391 _argvec[0] = (unsigned long)_orig.nraddr; \
1392 _argvec[1] = (unsigned long)( arg1 ); \
1393 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1394 "movq 8(%%rax), %%rdi\n\t" \
1395 "movq (%%rax), %%rax\n\t" \
1396 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1398 : "a"(&_argvec[0])__FRAME_POINTER \
1399 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1400 lval = (__typeof__( lval ))_res; \
1403 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
1405 volatile OrigFn _orig = ( orig ); \
1406 volatile unsigned long _argvec[3]; \
1407 volatile unsigned long _res; \
1408 _argvec[0] = (unsigned long)_orig.nraddr; \
1409 _argvec[1] = (unsigned long)( arg1 ); \
1410 _argvec[2] = (unsigned long)( arg2 ); \
1411 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1412 "movq 16(%%rax), %%rsi\n\t" \
1413 "movq 8(%%rax), %%rdi\n\t" \
1414 "movq (%%rax), %%rax\n\t" \
1415 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1417 : "a"(&_argvec[0])__FRAME_POINTER \
1418 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1419 lval = (__typeof__( lval ))_res; \
1422 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
1424 volatile OrigFn _orig = ( orig ); \
1425 volatile unsigned long _argvec[4]; \
1426 volatile unsigned long _res; \
1427 _argvec[0] = (unsigned long)_orig.nraddr; \
1428 _argvec[1] = (unsigned long)( arg1 ); \
1429 _argvec[2] = (unsigned long)( arg2 ); \
1430 _argvec[3] = (unsigned long)( arg3 ); \
1431 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1432 "movq 24(%%rax), %%rdx\n\t" \
1433 "movq 16(%%rax), %%rsi\n\t" \
1434 "movq 8(%%rax), %%rdi\n\t" \
1435 "movq (%%rax), %%rax\n\t" \
1436 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1438 : "a"(&_argvec[0])__FRAME_POINTER \
1439 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1440 lval = (__typeof__( lval ))_res; \
1443 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
1445 volatile OrigFn _orig = ( orig ); \
1446 volatile unsigned long _argvec[5]; \
1447 volatile unsigned long _res; \
1448 _argvec[0] = (unsigned long)_orig.nraddr; \
1449 _argvec[1] = (unsigned long)( arg1 ); \
1450 _argvec[2] = (unsigned long)( arg2 ); \
1451 _argvec[3] = (unsigned long)( arg3 ); \
1452 _argvec[4] = (unsigned long)( arg4 ); \
1453 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1454 "movq 32(%%rax), %%rcx\n\t" \
1455 "movq 24(%%rax), %%rdx\n\t" \
1456 "movq 16(%%rax), %%rsi\n\t" \
1457 "movq 8(%%rax), %%rdi\n\t" \
1458 "movq (%%rax), %%rax\n\t" \
1459 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1461 : "a"(&_argvec[0])__FRAME_POINTER \
1462 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1463 lval = (__typeof__( lval ))_res; \
1466 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
1468 volatile OrigFn _orig = ( orig ); \
1469 volatile unsigned long _argvec[6]; \
1470 volatile unsigned long _res; \
1471 _argvec[0] = (unsigned long)_orig.nraddr; \
1472 _argvec[1] = (unsigned long)( arg1 ); \
1473 _argvec[2] = (unsigned long)( arg2 ); \
1474 _argvec[3] = (unsigned long)( arg3 ); \
1475 _argvec[4] = (unsigned long)( arg4 ); \
1476 _argvec[5] = (unsigned long)( arg5 ); \
1477 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1478 "movq 40(%%rax), %%r8\n\t" \
1479 "movq 32(%%rax), %%rcx\n\t" \
1480 "movq 24(%%rax), %%rdx\n\t" \
1481 "movq 16(%%rax), %%rsi\n\t" \
1482 "movq 8(%%rax), %%rdi\n\t" \
1483 "movq (%%rax), %%rax\n\t" \
1484 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1486 : "a"(&_argvec[0])__FRAME_POINTER \
1487 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1488 lval = (__typeof__( lval ))_res; \
1491 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
1493 volatile OrigFn _orig = ( orig ); \
1494 volatile unsigned long _argvec[7]; \
1495 volatile unsigned long _res; \
1496 _argvec[0] = (unsigned long)_orig.nraddr; \
1497 _argvec[1] = (unsigned long)( arg1 ); \
1498 _argvec[2] = (unsigned long)( arg2 ); \
1499 _argvec[3] = (unsigned long)( arg3 ); \
1500 _argvec[4] = (unsigned long)( arg4 ); \
1501 _argvec[5] = (unsigned long)( arg5 ); \
1502 _argvec[6] = (unsigned long)( arg6 ); \
1503 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1504 "movq 48(%%rax), %%r9\n\t" \
1505 "movq 40(%%rax), %%r8\n\t" \
1506 "movq 32(%%rax), %%rcx\n\t" \
1507 "movq 24(%%rax), %%rdx\n\t" \
1508 "movq 16(%%rax), %%rsi\n\t" \
1509 "movq 8(%%rax), %%rdi\n\t" \
1510 "movq (%%rax), %%rax\n\t" \
1511 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1513 : "a"(&_argvec[0])__FRAME_POINTER \
1514 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1515 lval = (__typeof__( lval ))_res; \
1518 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
1520 volatile OrigFn _orig = ( orig ); \
1521 volatile unsigned long _argvec[8]; \
1522 volatile unsigned long _res; \
1523 _argvec[0] = (unsigned long)_orig.nraddr; \
1524 _argvec[1] = (unsigned long)( arg1 ); \
1525 _argvec[2] = (unsigned long)( arg2 ); \
1526 _argvec[3] = (unsigned long)( arg3 ); \
1527 _argvec[4] = (unsigned long)( arg4 ); \
1528 _argvec[5] = (unsigned long)( arg5 ); \
1529 _argvec[6] = (unsigned long)( arg6 ); \
1530 _argvec[7] = (unsigned long)( arg7 ); \
1531 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $136,%%rsp\n\t" \
1532 "pushq 56(%%rax)\n\t" \
1533 "movq 48(%%rax), %%r9\n\t" \
1534 "movq 40(%%rax), %%r8\n\t" \
1535 "movq 32(%%rax), %%rcx\n\t" \
1536 "movq 24(%%rax), %%rdx\n\t" \
1537 "movq 16(%%rax), %%rsi\n\t" \
1538 "movq 8(%%rax), %%rdi\n\t" \
1539 "movq (%%rax), %%rax\n\t" \
1540 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1542 : "a"(&_argvec[0])__FRAME_POINTER \
1543 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1544 lval = (__typeof__( lval ))_res; \
1547 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
1549 volatile OrigFn _orig = ( orig ); \
1550 volatile unsigned long _argvec[9]; \
1551 volatile unsigned long _res; \
1552 _argvec[0] = (unsigned long)_orig.nraddr; \
1553 _argvec[1] = (unsigned long)( arg1 ); \
1554 _argvec[2] = (unsigned long)( arg2 ); \
1555 _argvec[3] = (unsigned long)( arg3 ); \
1556 _argvec[4] = (unsigned long)( arg4 ); \
1557 _argvec[5] = (unsigned long)( arg5 ); \
1558 _argvec[6] = (unsigned long)( arg6 ); \
1559 _argvec[7] = (unsigned long)( arg7 ); \
1560 _argvec[8] = (unsigned long)( arg8 ); \
1561 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1562 "pushq 64(%%rax)\n\t" \
1563 "pushq 56(%%rax)\n\t" \
1564 "movq 48(%%rax), %%r9\n\t" \
1565 "movq 40(%%rax), %%r8\n\t" \
1566 "movq 32(%%rax), %%rcx\n\t" \
1567 "movq 24(%%rax), %%rdx\n\t" \
1568 "movq 16(%%rax), %%rsi\n\t" \
1569 "movq 8(%%rax), %%rdi\n\t" \
1570 "movq (%%rax), %%rax\n\t" \
1571 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1573 : "a"(&_argvec[0])__FRAME_POINTER \
1574 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1575 lval = (__typeof__( lval ))_res; \
1578 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
1580 volatile OrigFn _orig = ( orig ); \
1581 volatile unsigned long _argvec[10]; \
1582 volatile unsigned long _res; \
1583 _argvec[0] = (unsigned long)_orig.nraddr; \
1584 _argvec[1] = (unsigned long)( arg1 ); \
1585 _argvec[2] = (unsigned long)( arg2 ); \
1586 _argvec[3] = (unsigned long)( arg3 ); \
1587 _argvec[4] = (unsigned long)( arg4 ); \
1588 _argvec[5] = (unsigned long)( arg5 ); \
1589 _argvec[6] = (unsigned long)( arg6 ); \
1590 _argvec[7] = (unsigned long)( arg7 ); \
1591 _argvec[8] = (unsigned long)( arg8 ); \
1592 _argvec[9] = (unsigned long)( arg9 ); \
1593 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $136,%%rsp\n\t" \
1594 "pushq 72(%%rax)\n\t" \
1595 "pushq 64(%%rax)\n\t" \
1596 "pushq 56(%%rax)\n\t" \
1597 "movq 48(%%rax), %%r9\n\t" \
1598 "movq 40(%%rax), %%r8\n\t" \
1599 "movq 32(%%rax), %%rcx\n\t" \
1600 "movq 24(%%rax), %%rdx\n\t" \
1601 "movq 16(%%rax), %%rsi\n\t" \
1602 "movq 8(%%rax), %%rdi\n\t" \
1603 "movq (%%rax), %%rax\n\t" \
1604 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1606 : "a"(&_argvec[0])__FRAME_POINTER \
1607 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1608 lval = (__typeof__( lval ))_res; \
1611 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
1613 volatile OrigFn _orig = ( orig ); \
1614 volatile unsigned long _argvec[11]; \
1615 volatile unsigned long _res; \
1616 _argvec[0] = (unsigned long)_orig.nraddr; \
1617 _argvec[1] = (unsigned long)( arg1 ); \
1618 _argvec[2] = (unsigned long)( arg2 ); \
1619 _argvec[3] = (unsigned long)( arg3 ); \
1620 _argvec[4] = (unsigned long)( arg4 ); \
1621 _argvec[5] = (unsigned long)( arg5 ); \
1622 _argvec[6] = (unsigned long)( arg6 ); \
1623 _argvec[7] = (unsigned long)( arg7 ); \
1624 _argvec[8] = (unsigned long)( arg8 ); \
1625 _argvec[9] = (unsigned long)( arg9 ); \
1626 _argvec[10] = (unsigned long)( arg10 ); \
1627 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1628 "pushq 80(%%rax)\n\t" \
1629 "pushq 72(%%rax)\n\t" \
1630 "pushq 64(%%rax)\n\t" \
1631 "pushq 56(%%rax)\n\t" \
1632 "movq 48(%%rax), %%r9\n\t" \
1633 "movq 40(%%rax), %%r8\n\t" \
1634 "movq 32(%%rax), %%rcx\n\t" \
1635 "movq 24(%%rax), %%rdx\n\t" \
1636 "movq 16(%%rax), %%rsi\n\t" \
1637 "movq 8(%%rax), %%rdi\n\t" \
1638 "movq (%%rax), %%rax\n\t" \
1639 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1641 : "a"(&_argvec[0])__FRAME_POINTER \
1642 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1643 lval = (__typeof__( lval ))_res; \
1646 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
1648 volatile OrigFn _orig = ( orig ); \
1649 volatile unsigned long _argvec[12]; \
1650 volatile unsigned long _res; \
1651 _argvec[0] = (unsigned long)_orig.nraddr; \
1652 _argvec[1] = (unsigned long)( arg1 ); \
1653 _argvec[2] = (unsigned long)( arg2 ); \
1654 _argvec[3] = (unsigned long)( arg3 ); \
1655 _argvec[4] = (unsigned long)( arg4 ); \
1656 _argvec[5] = (unsigned long)( arg5 ); \
1657 _argvec[6] = (unsigned long)( arg6 ); \
1658 _argvec[7] = (unsigned long)( arg7 ); \
1659 _argvec[8] = (unsigned long)( arg8 ); \
1660 _argvec[9] = (unsigned long)( arg9 ); \
1661 _argvec[10] = (unsigned long)( arg10 ); \
1662 _argvec[11] = (unsigned long)( arg11 ); \
1663 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $136,%%rsp\n\t" \
1664 "pushq 88(%%rax)\n\t" \
1665 "pushq 80(%%rax)\n\t" \
1666 "pushq 72(%%rax)\n\t" \
1667 "pushq 64(%%rax)\n\t" \
1668 "pushq 56(%%rax)\n\t" \
1669 "movq 48(%%rax), %%r9\n\t" \
1670 "movq 40(%%rax), %%r8\n\t" \
1671 "movq 32(%%rax), %%rcx\n\t" \
1672 "movq 24(%%rax), %%rdx\n\t" \
1673 "movq 16(%%rax), %%rsi\n\t" \
1674 "movq 8(%%rax), %%rdi\n\t" \
1675 "movq (%%rax), %%rax\n\t" \
1676 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1678 : "a"(&_argvec[0])__FRAME_POINTER \
1679 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1680 lval = (__typeof__( lval ))_res; \
1683 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
1685 volatile OrigFn _orig = ( orig ); \
1686 volatile unsigned long _argvec[13]; \
1687 volatile unsigned long _res; \
1688 _argvec[0] = (unsigned long)_orig.nraddr; \
1689 _argvec[1] = (unsigned long)( arg1 ); \
1690 _argvec[2] = (unsigned long)( arg2 ); \
1691 _argvec[3] = (unsigned long)( arg3 ); \
1692 _argvec[4] = (unsigned long)( arg4 ); \
1693 _argvec[5] = (unsigned long)( arg5 ); \
1694 _argvec[6] = (unsigned long)( arg6 ); \
1695 _argvec[7] = (unsigned long)( arg7 ); \
1696 _argvec[8] = (unsigned long)( arg8 ); \
1697 _argvec[9] = (unsigned long)( arg9 ); \
1698 _argvec[10] = (unsigned long)( arg10 ); \
1699 _argvec[11] = (unsigned long)( arg11 ); \
1700 _argvec[12] = (unsigned long)( arg12 ); \
1701 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1702 "pushq 96(%%rax)\n\t" \
1703 "pushq 88(%%rax)\n\t" \
1704 "pushq 80(%%rax)\n\t" \
1705 "pushq 72(%%rax)\n\t" \
1706 "pushq 64(%%rax)\n\t" \
1707 "pushq 56(%%rax)\n\t" \
1708 "movq 48(%%rax), %%r9\n\t" \
1709 "movq 40(%%rax), %%r8\n\t" \
1710 "movq 32(%%rax), %%rcx\n\t" \
1711 "movq 24(%%rax), %%rdx\n\t" \
1712 "movq 16(%%rax), %%rsi\n\t" \
1713 "movq 8(%%rax), %%rdi\n\t" \
1714 "movq (%%rax), %%rax\n\t" \
1715 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1717 : "a"(&_argvec[0])__FRAME_POINTER \
1718 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1719 lval = (__typeof__( lval ))_res; \
1726 # if defined( PLAT_ppc32_linux )
1752 # define __CALLER_SAVED_REGS \
1753 "lr", "ctr", "xer", "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", "r0", "r2", "r3", "r4", "r5", "r6", \
1754 "r7", "r8", "r9", "r10", "r11", "r12", "r13"
1761 # define VALGRIND_ALIGN_STACK \
1763 "rlwinm 1,1,0,0,27\n\t"
1764 # define VALGRIND_RESTORE_STACK "mr 1,28\n\t"
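/* Descriptive note: "rlwinm 1,1,0,0,27" clears the low four bits of r1,
   aligning the stack pointer to 16 bytes, after the original value has been
   saved in r28 (restored by VALGRIND_RESTORE_STACK); r28 therefore shows up
   in every clobber list below. */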
1769 # define CALL_FN_W_v( lval, orig ) \
1771 volatile OrigFn _orig = ( orig ); \
1772 volatile unsigned long _argvec[1]; \
1773 volatile unsigned long _res; \
1774 _argvec[0] = (unsigned long)_orig.nraddr; \
1775 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1776 "lwz 11,0(11)\n\t" \
1777 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1779 : "r"( &_argvec[0] ) \
1780 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1781 lval = (__typeof__( lval ))_res; \
1784 # define CALL_FN_W_W( lval, orig, arg1 ) \
1786 volatile OrigFn _orig = ( orig ); \
1787 volatile unsigned long _argvec[2]; \
1788 volatile unsigned long _res; \
1789 _argvec[0] = (unsigned long)_orig.nraddr; \
1790 _argvec[1] = (unsigned long)arg1; \
1791 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1793 "lwz 11,0(11)\n\t" \
1794 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1796 : "r"( &_argvec[0] ) \
1797 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1798 lval = (__typeof__( lval ))_res; \
1801 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
1803 volatile OrigFn _orig = ( orig ); \
1804 volatile unsigned long _argvec[3]; \
1805 volatile unsigned long _res; \
1806 _argvec[0] = (unsigned long)_orig.nraddr; \
1807 _argvec[1] = (unsigned long)arg1; \
1808 _argvec[2] = (unsigned long)arg2; \
1809 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1812 "lwz 11,0(11)\n\t" \
1813 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1815 : "r"( &_argvec[0] ) \
1816 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1817 lval = (__typeof__( lval ))_res; \
1820 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
1822 volatile OrigFn _orig = ( orig ); \
1823 volatile unsigned long _argvec[4]; \
1824 volatile unsigned long _res; \
1825 _argvec[0] = (unsigned long)_orig.nraddr; \
1826 _argvec[1] = (unsigned long)arg1; \
1827 _argvec[2] = (unsigned long)arg2; \
1828 _argvec[3] = (unsigned long)arg3; \
1829 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1832 "lwz 5,12(11)\n\t" \
1833 "lwz 11,0(11)\n\t" \
1834 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1836 : "r"( &_argvec[0] ) \
1837 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1838 lval = (__typeof__( lval ))_res; \
1841 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
1843 volatile OrigFn _orig = ( orig ); \
1844 volatile unsigned long _argvec[5]; \
1845 volatile unsigned long _res; \
1846 _argvec[0] = (unsigned long)_orig.nraddr; \
1847 _argvec[1] = (unsigned long)arg1; \
1848 _argvec[2] = (unsigned long)arg2; \
1849 _argvec[3] = (unsigned long)arg3; \
1850 _argvec[4] = (unsigned long)arg4; \
1851 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1854 "lwz 5,12(11)\n\t" \
1855 "lwz 6,16(11)\n\t" \
1856 "lwz 11,0(11)\n\t" \
1857 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1859 : "r"( &_argvec[0] ) \
1860 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1861 lval = (__typeof__( lval ))_res; \
1864 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
1866 volatile OrigFn _orig = ( orig ); \
1867 volatile unsigned long _argvec[6]; \
1868 volatile unsigned long _res; \
1869 _argvec[0] = (unsigned long)_orig.nraddr; \
1870 _argvec[1] = (unsigned long)arg1; \
1871 _argvec[2] = (unsigned long)arg2; \
1872 _argvec[3] = (unsigned long)arg3; \
1873 _argvec[4] = (unsigned long)arg4; \
1874 _argvec[5] = (unsigned long)arg5; \
1875 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1878 "lwz 5,12(11)\n\t" \
1879 "lwz 6,16(11)\n\t" \
1880 "lwz 7,20(11)\n\t" \
1881 "lwz 11,0(11)\n\t" \
1882 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1884 : "r"( &_argvec[0] ) \
1885 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1886 lval = (__typeof__( lval ))_res; \
1889 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
1891 volatile OrigFn _orig = ( orig ); \
1892 volatile unsigned long _argvec[7]; \
1893 volatile unsigned long _res; \
1894 _argvec[0] = (unsigned long)_orig.nraddr; \
1895 _argvec[1] = (unsigned long)arg1; \
1896 _argvec[2] = (unsigned long)arg2; \
1897 _argvec[3] = (unsigned long)arg3; \
1898 _argvec[4] = (unsigned long)arg4; \
1899 _argvec[5] = (unsigned long)arg5; \
1900 _argvec[6] = (unsigned long)arg6; \
1901 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1904 "lwz 5,12(11)\n\t" \
1905 "lwz 6,16(11)\n\t" \
1906 "lwz 7,20(11)\n\t" \
1907 "lwz 8,24(11)\n\t" \
1908 "lwz 11,0(11)\n\t" \
1909 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1911 : "r"( &_argvec[0] ) \
1912 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1913 lval = (__typeof__( lval ))_res; \
1916 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
1918 volatile OrigFn _orig = ( orig ); \
1919 volatile unsigned long _argvec[8]; \
1920 volatile unsigned long _res; \
1921 _argvec[0] = (unsigned long)_orig.nraddr; \
1922 _argvec[1] = (unsigned long)arg1; \
1923 _argvec[2] = (unsigned long)arg2; \
1924 _argvec[3] = (unsigned long)arg3; \
1925 _argvec[4] = (unsigned long)arg4; \
1926 _argvec[5] = (unsigned long)arg5; \
1927 _argvec[6] = (unsigned long)arg6; \
1928 _argvec[7] = (unsigned long)arg7; \
1929 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1932 "lwz 5,12(11)\n\t" \
1933 "lwz 6,16(11)\n\t" \
1934 "lwz 7,20(11)\n\t" \
1935 "lwz 8,24(11)\n\t" \
1936 "lwz 9,28(11)\n\t" \
1937 "lwz 11,0(11)\n\t" \
1938 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1940 : "r"( &_argvec[0] ) \
1941 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1942 lval = (__typeof__( lval ))_res; \
1945 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
1947 volatile OrigFn _orig = ( orig ); \
1948 volatile unsigned long _argvec[9]; \
1949 volatile unsigned long _res; \
1950 _argvec[0] = (unsigned long)_orig.nraddr; \
1951 _argvec[1] = (unsigned long)arg1; \
1952 _argvec[2] = (unsigned long)arg2; \
1953 _argvec[3] = (unsigned long)arg3; \
1954 _argvec[4] = (unsigned long)arg4; \
1955 _argvec[5] = (unsigned long)arg5; \
1956 _argvec[6] = (unsigned long)arg6; \
1957 _argvec[7] = (unsigned long)arg7; \
1958 _argvec[8] = (unsigned long)arg8; \
1959 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1962 "lwz 5,12(11)\n\t" \
1963 "lwz 6,16(11)\n\t" \
1964 "lwz 7,20(11)\n\t" \
1965 "lwz 8,24(11)\n\t" \
1966 "lwz 9,28(11)\n\t" \
1967 "lwz 10,32(11)\n\t" \
1968 "lwz 11,0(11)\n\t" \
1969 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1971 : "r"( &_argvec[0] ) \
1972 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1973 lval = (__typeof__( lval ))_res; \
1976 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
1978 volatile OrigFn _orig = ( orig ); \
1979 volatile unsigned long _argvec[10]; \
1980 volatile unsigned long _res; \
1981 _argvec[0] = (unsigned long)_orig.nraddr; \
1982 _argvec[1] = (unsigned long)arg1; \
1983 _argvec[2] = (unsigned long)arg2; \
1984 _argvec[3] = (unsigned long)arg3; \
1985 _argvec[4] = (unsigned long)arg4; \
1986 _argvec[5] = (unsigned long)arg5; \
1987 _argvec[6] = (unsigned long)arg6; \
1988 _argvec[7] = (unsigned long)arg7; \
1989 _argvec[8] = (unsigned long)arg8; \
1990 _argvec[9] = (unsigned long)arg9; \
1991 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1992 "addi 1,1,-16\n\t" \
1993 "lwz 3,36(11)\n\t" \
1997 "lwz 5,12(11)\n\t" \
1998 "lwz 6,16(11)\n\t" \
1999 "lwz 7,20(11)\n\t" \
2000 "lwz 8,24(11)\n\t" \
2001 "lwz 9,28(11)\n\t" \
2002 "lwz 10,32(11)\n\t" \
2003 "lwz 11,0(11)\n\t" \
2004 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
2006 : "r"( &_argvec[0] ) \
2007 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2008 lval = (__typeof__( lval ))_res; \
2011 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
2013 volatile OrigFn _orig = ( orig ); \
2014 volatile unsigned long _argvec[11]; \
2015 volatile unsigned long _res; \
2016 _argvec[0] = (unsigned long)_orig.nraddr; \
2017 _argvec[1] = (unsigned long)arg1; \
2018 _argvec[2] = (unsigned long)arg2; \
2019 _argvec[3] = (unsigned long)arg3; \
2020 _argvec[4] = (unsigned long)arg4; \
2021 _argvec[5] = (unsigned long)arg5; \
2022 _argvec[6] = (unsigned long)arg6; \
2023 _argvec[7] = (unsigned long)arg7; \
2024 _argvec[8] = (unsigned long)arg8; \
2025 _argvec[9] = (unsigned long)arg9; \
2026 _argvec[10] = (unsigned long)arg10; \
2027 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2028 "addi 1,1,-16\n\t" \
2029 "lwz 3,40(11)\n\t" \
2031 "lwz 3,36(11)\n\t" \
2035 "lwz 5,12(11)\n\t" \
2036 "lwz 6,16(11)\n\t" \
2037 "lwz 7,20(11)\n\t" \
2038 "lwz 8,24(11)\n\t" \
2039 "lwz 9,28(11)\n\t" \
2040 "lwz 10,32(11)\n\t" \
2041 "lwz 11,0(11)\n\t" \
2042 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
2044 : "r"( &_argvec[0] ) \
2045 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2046 lval = (__typeof__( lval ))_res; \
2049 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
2051 volatile OrigFn _orig = ( orig ); \
2052 volatile unsigned long _argvec[12]; \
2053 volatile unsigned long _res; \
2054 _argvec[0] = (unsigned long)_orig.nraddr; \
2055 _argvec[1] = (unsigned long)arg1; \
2056 _argvec[2] = (unsigned long)arg2; \
2057 _argvec[3] = (unsigned long)arg3; \
2058 _argvec[4] = (unsigned long)arg4; \
2059 _argvec[5] = (unsigned long)arg5; \
2060 _argvec[6] = (unsigned long)arg6; \
2061 _argvec[7] = (unsigned long)arg7; \
2062 _argvec[8] = (unsigned long)arg8; \
2063 _argvec[9] = (unsigned long)arg9; \
2064 _argvec[10] = (unsigned long)arg10; \
2065 _argvec[11] = (unsigned long)arg11; \
2066 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2067 "addi 1,1,-32\n\t" \
2068 "lwz 3,44(11)\n\t" \
2070 "lwz 3,40(11)\n\t" \
2072 "lwz 3,36(11)\n\t" \
2076 "lwz 5,12(11)\n\t" \
2077 "lwz 6,16(11)\n\t" \
2078 "lwz 7,20(11)\n\t" \
2079 "lwz 8,24(11)\n\t" \
2080 "lwz 9,28(11)\n\t" \
2081 "lwz 10,32(11)\n\t" \
2082 "lwz 11,0(11)\n\t" \
2083 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
2085 : "r"( &_argvec[0] ) \
2086 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2087 lval = (__typeof__( lval ))_res; \
2090 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
2092 volatile OrigFn _orig = ( orig ); \
2093 volatile unsigned long _argvec[13]; \
2094 volatile unsigned long _res; \
2095 _argvec[0] = (unsigned long)_orig.nraddr; \
2096 _argvec[1] = (unsigned long)arg1; \
2097 _argvec[2] = (unsigned long)arg2; \
2098 _argvec[3] = (unsigned long)arg3; \
2099 _argvec[4] = (unsigned long)arg4; \
2100 _argvec[5] = (unsigned long)arg5; \
2101 _argvec[6] = (unsigned long)arg6; \
2102 _argvec[7] = (unsigned long)arg7; \
2103 _argvec[8] = (unsigned long)arg8; \
2104 _argvec[9] = (unsigned long)arg9; \
2105 _argvec[10] = (unsigned long)arg10; \
2106 _argvec[11] = (unsigned long)arg11; \
2107 _argvec[12] = (unsigned long)arg12; \
2108 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2109 "addi 1,1,-32\n\t" \
2110 "lwz 3,48(11)\n\t" \
2112 "lwz 3,44(11)\n\t" \
2114 "lwz 3,40(11)\n\t" \
2116 "lwz 3,36(11)\n\t" \
2120 "lwz 5,12(11)\n\t" \
2121 "lwz 6,16(11)\n\t" \
2122 "lwz 7,20(11)\n\t" \
2123 "lwz 8,24(11)\n\t" \
2124 "lwz 9,28(11)\n\t" \
2125 "lwz 10,32(11)\n\t" \
2126 "lwz 11,0(11)\n\t" \
2127 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
2129 : "r"( &_argvec[0] ) \
2130 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2131 lval = (__typeof__( lval ))_res; \
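The ppc32-linux block above, like every per-platform block in this header, only changes how the CALL_FN_W_* macros marshal arguments; the caller-facing idiom is the same on every platform. A minimal usage sketch follows (not part of this header): the function foo, its signature, and the wrapper body are illustrative, while OrigFn, VALGRIND_GET_ORIG_FN, CALL_FN_W_W and I_WRAP_SONAME_FNNAME_ZU are the function-wrapping facilities this header documents. The wrapper must be linked into the client program or loaded via a preloaded shared object for Valgrind to pick it up.

#include <stdio.h>
#include "valgrind.h"

/* Illustrative wrapper for   int foo(int)   in the main executable
   (soname token NONE); the name and signature are made up for this sketch. */
int I_WRAP_SONAME_FNNAME_ZU(NONE, foo)(int x)
{
    int    result;
    OrigFn fn;
    VALGRIND_GET_ORIG_FN(fn);        /* fetch the real, non-redirected foo */
    CALL_FN_W_W(result, fn, x);      /* one word returned, one word argument */
    fprintf(stderr, "foo(%d) -> %d\n", x, result);
    return result;
}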
2138 # if defined( PLAT_ppc64_linux )
2143 # define __CALLER_SAVED_REGS \
2144 "lr", "ctr", "xer", "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", "r0", "r2", "r3", "r4", "r5", "r6", \
2145 "r7", "r8", "r9", "r10", "r11", "r12", "r13"
2152 # define VALGRIND_ALIGN_STACK \
2154 "rldicr 1,1,0,59\n\t"
2155 # define VALGRIND_RESTORE_STACK "mr 1,28\n\t"
2160 # define CALL_FN_W_v( lval, orig ) \
2162 volatile OrigFn _orig = ( orig ); \
2163 volatile unsigned long _argvec[3 + 0]; \
2164 volatile unsigned long _res; \
2166 _argvec[1] = (unsigned long)_orig.r2; \
2167 _argvec[2] = (unsigned long)_orig.nraddr; \
2168 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2169 "std 2,-16(11)\n\t" \
2171 "ld 11, 0(11)\n\t" \
2172 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2174 "ld 2,-16(11)\n\t" \
2175 VALGRIND_RESTORE_STACK \
2177 : "r"( &_argvec[2] ) \
2178 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2179 lval = (__typeof__( lval ))_res; \
2182 # define CALL_FN_W_W( lval, orig, arg1 ) \
2184 volatile OrigFn _orig = ( orig ); \
2185 volatile unsigned long _argvec[3 + 1]; \
2186 volatile unsigned long _res; \
2188 _argvec[1] = (unsigned long)_orig.r2; \
2189 _argvec[2] = (unsigned long)_orig.nraddr; \
2190 _argvec[2 + 1] = (unsigned long)arg1; \
2191 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2192 "std 2,-16(11)\n\t" \
2195 "ld 11, 0(11)\n\t" \
2196 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2198 "ld 2,-16(11)\n\t" \
2199 VALGRIND_RESTORE_STACK \
2201 : "r"( &_argvec[2] ) \
2202 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2203 lval = (__typeof__( lval ))_res; \
2206 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
2208 volatile OrigFn _orig = ( orig ); \
2209 volatile unsigned long _argvec[3 + 2]; \
2210 volatile unsigned long _res; \
2212 _argvec[1] = (unsigned long)_orig.r2; \
2213 _argvec[2] = (unsigned long)_orig.nraddr; \
2214 _argvec[2 + 1] = (unsigned long)arg1; \
2215 _argvec[2 + 2] = (unsigned long)arg2; \
2216 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2217 "std 2,-16(11)\n\t" \
2220 "ld 4, 16(11)\n\t" \
2221 "ld 11, 0(11)\n\t" \
2222 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2224 "ld 2,-16(11)\n\t" \
2225 VALGRIND_RESTORE_STACK \
2227 : "r"( &_argvec[2] ) \
2228 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2229 lval = (__typeof__( lval ))_res; \
2232 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
2234 volatile OrigFn _orig = ( orig ); \
2235 volatile unsigned long _argvec[3 + 3]; \
2236 volatile unsigned long _res; \
2238 _argvec[1] = (unsigned long)_orig.r2; \
2239 _argvec[2] = (unsigned long)_orig.nraddr; \
2240 _argvec[2 + 1] = (unsigned long)arg1; \
2241 _argvec[2 + 2] = (unsigned long)arg2; \
2242 _argvec[2 + 3] = (unsigned long)arg3; \
2243 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2244 "std 2,-16(11)\n\t" \
2247 "ld 4, 16(11)\n\t" \
2248 "ld 5, 24(11)\n\t" \
2249 "ld 11, 0(11)\n\t" \
2250 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2252 "ld 2,-16(11)\n\t" \
2253 VALGRIND_RESTORE_STACK \
2255 : "r"( &_argvec[2] ) \
2256 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2257 lval = (__typeof__( lval ))_res; \
2260 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
2262 volatile OrigFn _orig = ( orig ); \
2263 volatile unsigned long _argvec[3 + 4]; \
2264 volatile unsigned long _res; \
2266 _argvec[1] = (unsigned long)_orig.r2; \
2267 _argvec[2] = (unsigned long)_orig.nraddr; \
2268 _argvec[2 + 1] = (unsigned long)arg1; \
2269 _argvec[2 + 2] = (unsigned long)arg2; \
2270 _argvec[2 + 3] = (unsigned long)arg3; \
2271 _argvec[2 + 4] = (unsigned long)arg4; \
2272 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2273 "std 2,-16(11)\n\t" \
2276 "ld 4, 16(11)\n\t" \
2277 "ld 5, 24(11)\n\t" \
2278 "ld 6, 32(11)\n\t" \
2279 "ld 11, 0(11)\n\t" \
2280 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2282 "ld 2,-16(11)\n\t" \
2283 VALGRIND_RESTORE_STACK \
2285 : "r"( &_argvec[2] ) \
2286 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2287 lval = (__typeof__( lval ))_res; \
2290 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
2292 volatile OrigFn _orig = ( orig ); \
2293 volatile unsigned long _argvec[3 + 5]; \
2294 volatile unsigned long _res; \
2296 _argvec[1] = (unsigned long)_orig.r2; \
2297 _argvec[2] = (unsigned long)_orig.nraddr; \
2298 _argvec[2 + 1] = (unsigned long)arg1; \
2299 _argvec[2 + 2] = (unsigned long)arg2; \
2300 _argvec[2 + 3] = (unsigned long)arg3; \
2301 _argvec[2 + 4] = (unsigned long)arg4; \
2302 _argvec[2 + 5] = (unsigned long)arg5; \
2303 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2304 "std 2,-16(11)\n\t" \
2307 "ld 4, 16(11)\n\t" \
2308 "ld 5, 24(11)\n\t" \
2309 "ld 6, 32(11)\n\t" \
2310 "ld 7, 40(11)\n\t" \
2311 "ld 11, 0(11)\n\t" \
2312 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2314 "ld 2,-16(11)\n\t" \
2315 VALGRIND_RESTORE_STACK \
2317 : "r"( &_argvec[2] ) \
2318 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2319 lval = (__typeof__( lval ))_res; \
2322 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
2324 volatile OrigFn _orig = ( orig ); \
2325 volatile unsigned long _argvec[3 + 6]; \
2326 volatile unsigned long _res; \
2328 _argvec[1] = (unsigned long)_orig.r2; \
2329 _argvec[2] = (unsigned long)_orig.nraddr; \
2330 _argvec[2 + 1] = (unsigned long)arg1; \
2331 _argvec[2 + 2] = (unsigned long)arg2; \
2332 _argvec[2 + 3] = (unsigned long)arg3; \
2333 _argvec[2 + 4] = (unsigned long)arg4; \
2334 _argvec[2 + 5] = (unsigned long)arg5; \
2335 _argvec[2 + 6] = (unsigned long)arg6; \
2336 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2337 "std 2,-16(11)\n\t" \
2340 "ld 4, 16(11)\n\t" \
2341 "ld 5, 24(11)\n\t" \
2342 "ld 6, 32(11)\n\t" \
2343 "ld 7, 40(11)\n\t" \
2344 "ld 8, 48(11)\n\t" \
2345 "ld 11, 0(11)\n\t" \
2346 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2348 "ld 2,-16(11)\n\t" \
2349 VALGRIND_RESTORE_STACK \
2351 : "r"( &_argvec[2] ) \
2352 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2353 lval = (__typeof__( lval ))_res; \
2356 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
2358 volatile OrigFn _orig = ( orig ); \
2359 volatile unsigned long _argvec[3 + 7]; \
2360 volatile unsigned long _res; \
2362 _argvec[1] = (unsigned long)_orig.r2; \
2363 _argvec[2] = (unsigned long)_orig.nraddr; \
2364 _argvec[2 + 1] = (unsigned long)arg1; \
2365 _argvec[2 + 2] = (unsigned long)arg2; \
2366 _argvec[2 + 3] = (unsigned long)arg3; \
2367 _argvec[2 + 4] = (unsigned long)arg4; \
2368 _argvec[2 + 5] = (unsigned long)arg5; \
2369 _argvec[2 + 6] = (unsigned long)arg6; \
2370 _argvec[2 + 7] = (unsigned long)arg7; \
2371 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2372 "std 2,-16(11)\n\t" \
2375 "ld 4, 16(11)\n\t" \
2376 "ld 5, 24(11)\n\t" \
2377 "ld 6, 32(11)\n\t" \
2378 "ld 7, 40(11)\n\t" \
2379 "ld 8, 48(11)\n\t" \
2380 "ld 9, 56(11)\n\t" \
2381 "ld 11, 0(11)\n\t" \
2382 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2384 "ld 2,-16(11)\n\t" \
2385 VALGRIND_RESTORE_STACK \
2387 : "r"( &_argvec[2] ) \
2388 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2389 lval = (__typeof__( lval ))_res; \
2392 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
2394 volatile OrigFn _orig = ( orig ); \
2395 volatile unsigned long _argvec[3 + 8]; \
2396 volatile unsigned long _res; \
2398 _argvec[1] = (unsigned long)_orig.r2; \
2399 _argvec[2] = (unsigned long)_orig.nraddr; \
2400 _argvec[2 + 1] = (unsigned long)arg1; \
2401 _argvec[2 + 2] = (unsigned long)arg2; \
2402 _argvec[2 + 3] = (unsigned long)arg3; \
2403 _argvec[2 + 4] = (unsigned long)arg4; \
2404 _argvec[2 + 5] = (unsigned long)arg5; \
2405 _argvec[2 + 6] = (unsigned long)arg6; \
2406 _argvec[2 + 7] = (unsigned long)arg7; \
2407 _argvec[2 + 8] = (unsigned long)arg8; \
2408 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2409 "std 2,-16(11)\n\t" \
2412 "ld 4, 16(11)\n\t" \
2413 "ld 5, 24(11)\n\t" \
2414 "ld 6, 32(11)\n\t" \
2415 "ld 7, 40(11)\n\t" \
2416 "ld 8, 48(11)\n\t" \
2417 "ld 9, 56(11)\n\t" \
2418 "ld 10, 64(11)\n\t" \
2419 "ld 11, 0(11)\n\t" \
2420 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2422 "ld 2,-16(11)\n\t" \
2423 VALGRIND_RESTORE_STACK \
2425 : "r"( &_argvec[2] ) \
2426 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2427 lval = (__typeof__( lval ))_res; \
2430 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
2432 volatile OrigFn _orig = ( orig ); \
2433 volatile unsigned long _argvec[3 + 9]; \
2434 volatile unsigned long _res; \
2436 _argvec[1] = (unsigned long)_orig.r2; \
2437 _argvec[2] = (unsigned long)_orig.nraddr; \
2438 _argvec[2 + 1] = (unsigned long)arg1; \
2439 _argvec[2 + 2] = (unsigned long)arg2; \
2440 _argvec[2 + 3] = (unsigned long)arg3; \
2441 _argvec[2 + 4] = (unsigned long)arg4; \
2442 _argvec[2 + 5] = (unsigned long)arg5; \
2443 _argvec[2 + 6] = (unsigned long)arg6; \
2444 _argvec[2 + 7] = (unsigned long)arg7; \
2445 _argvec[2 + 8] = (unsigned long)arg8; \
2446 _argvec[2 + 9] = (unsigned long)arg9; \
2447 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2448 "std 2,-16(11)\n\t" \
2450 "addi 1,1,-128\n\t" \
2452 "std 3,112(1)\n\t" \
2454 "ld 4, 16(11)\n\t" \
2455 "ld 5, 24(11)\n\t" \
2456 "ld 6, 32(11)\n\t" \
2457 "ld 7, 40(11)\n\t" \
2458 "ld 8, 48(11)\n\t" \
2459 "ld 9, 56(11)\n\t" \
2460 "ld 10, 64(11)\n\t" \
2461 "ld 11, 0(11)\n\t" \
2462 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2464 "ld 2,-16(11)\n\t" \
2465 VALGRIND_RESTORE_STACK \
2467 : "r"( &_argvec[2] ) \
2468 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2469 lval = (__typeof__( lval ))_res; \
2472 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
2474 volatile OrigFn _orig = ( orig ); \
2475 volatile unsigned long _argvec[3 + 10]; \
2476 volatile unsigned long _res; \
2478 _argvec[1] = (unsigned long)_orig.r2; \
2479 _argvec[2] = (unsigned long)_orig.nraddr; \
2480 _argvec[2 + 1] = (unsigned long)arg1; \
2481 _argvec[2 + 2] = (unsigned long)arg2; \
2482 _argvec[2 + 3] = (unsigned long)arg3; \
2483 _argvec[2 + 4] = (unsigned long)arg4; \
2484 _argvec[2 + 5] = (unsigned long)arg5; \
2485 _argvec[2 + 6] = (unsigned long)arg6; \
2486 _argvec[2 + 7] = (unsigned long)arg7; \
2487 _argvec[2 + 8] = (unsigned long)arg8; \
2488 _argvec[2 + 9] = (unsigned long)arg9; \
2489 _argvec[2 + 10] = (unsigned long)arg10; \
2490 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2491 "std 2,-16(11)\n\t" \
2493 "addi 1,1,-128\n\t" \
2495 "std 3,120(1)\n\t" \
2497 "std 3,112(1)\n\t" \
2499 "ld 4, 16(11)\n\t" \
2500 "ld 5, 24(11)\n\t" \
2501 "ld 6, 32(11)\n\t" \
2502 "ld 7, 40(11)\n\t" \
2503 "ld 8, 48(11)\n\t" \
2504 "ld 9, 56(11)\n\t" \
2505 "ld 10, 64(11)\n\t" \
2506 "ld 11, 0(11)\n\t" \
2507 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2509 "ld 2,-16(11)\n\t" \
2510 VALGRIND_RESTORE_STACK \
2512 : "r"( &_argvec[2] ) \
2513 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2514 lval = (__typeof__( lval ))_res; \
2517 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
2519 volatile OrigFn _orig = ( orig ); \
2520 volatile unsigned long _argvec[3 + 11]; \
2521 volatile unsigned long _res; \
2523 _argvec[1] = (unsigned long)_orig.r2; \
2524 _argvec[2] = (unsigned long)_orig.nraddr; \
2525 _argvec[2 + 1] = (unsigned long)arg1; \
2526 _argvec[2 + 2] = (unsigned long)arg2; \
2527 _argvec[2 + 3] = (unsigned long)arg3; \
2528 _argvec[2 + 4] = (unsigned long)arg4; \
2529 _argvec[2 + 5] = (unsigned long)arg5; \
2530 _argvec[2 + 6] = (unsigned long)arg6; \
2531 _argvec[2 + 7] = (unsigned long)arg7; \
2532 _argvec[2 + 8] = (unsigned long)arg8; \
2533 _argvec[2 + 9] = (unsigned long)arg9; \
2534 _argvec[2 + 10] = (unsigned long)arg10; \
2535 _argvec[2 + 11] = (unsigned long)arg11; \
2536 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2537 "std 2,-16(11)\n\t" \
2539 "addi 1,1,-144\n\t" \
2541 "std 3,128(1)\n\t" \
2543 "std 3,120(1)\n\t" \
2545 "std 3,112(1)\n\t" \
2547 "ld 4, 16(11)\n\t" \
2548 "ld 5, 24(11)\n\t" \
2549 "ld 6, 32(11)\n\t" \
2550 "ld 7, 40(11)\n\t" \
2551 "ld 8, 48(11)\n\t" \
2552 "ld 9, 56(11)\n\t" \
2553 "ld 10, 64(11)\n\t" \
2554 "ld 11, 0(11)\n\t" \
2555 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2557 "ld 2,-16(11)\n\t" \
2558 VALGRIND_RESTORE_STACK \
2560 : "r"( &_argvec[2] ) \
2561 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2562 lval = (__typeof__( lval ))_res; \
2565 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
2567 volatile OrigFn _orig = ( orig ); \
2568 volatile unsigned long _argvec[3 + 12]; \
2569 volatile unsigned long _res; \
2571 _argvec[1] = (unsigned long)_orig.r2; \
2572 _argvec[2] = (unsigned long)_orig.nraddr; \
2573 _argvec[2 + 1] = (unsigned long)arg1; \
2574 _argvec[2 + 2] = (unsigned long)arg2; \
2575 _argvec[2 + 3] = (unsigned long)arg3; \
2576 _argvec[2 + 4] = (unsigned long)arg4; \
2577 _argvec[2 + 5] = (unsigned long)arg5; \
2578 _argvec[2 + 6] = (unsigned long)arg6; \
2579 _argvec[2 + 7] = (unsigned long)arg7; \
2580 _argvec[2 + 8] = (unsigned long)arg8; \
2581 _argvec[2 + 9] = (unsigned long)arg9; \
2582 _argvec[2 + 10] = (unsigned long)arg10; \
2583 _argvec[2 + 11] = (unsigned long)arg11; \
2584 _argvec[2 + 12] = (unsigned long)arg12; \
2585 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2586 "std 2,-16(11)\n\t" \
2588 "addi 1,1,-144\n\t" \
2590 "std 3,136(1)\n\t" \
2592 "std 3,128(1)\n\t" \
2594 "std 3,120(1)\n\t" \
2596 "std 3,112(1)\n\t" \
2598 "ld 4, 16(11)\n\t" \
2599 "ld 5, 24(11)\n\t" \
2600 "ld 6, 32(11)\n\t" \
2601 "ld 7, 40(11)\n\t" \
2602 "ld 8, 48(11)\n\t" \
2603 "ld 9, 56(11)\n\t" \
2604 "ld 10, 64(11)\n\t" \
2605 "ld 11, 0(11)\n\t" \
2606 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2608 "ld 2,-16(11)\n\t" \
2609 VALGRIND_RESTORE_STACK \
2611 : "r"( &_argvec[2] ) \
2612 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2613 lval = (__typeof__( lval ))_res; \
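On ppc64-linux the OrigFn carries a second field, r2, alongside nraddr: the TOC pointer to use for the call. The macros above save the caller's r2, install _orig.r2 for the branch, and restore r2 afterwards (the "std 2,-16(11)" / "ld 2,-16(11)" pair), while r28 preserves the stack pointer around VALGRIND_ALIGN_STACK. A sketch of a zero-argument wrapper (the soname libexampleZdsoZd1, i.e. "libexample.so.1" in Z-encoding, and get_counter are hypothetical):

#include "valgrind.h"

/* Illustrative wrapper for   long get_counter(void)   in a made-up library. */
long I_WRAP_SONAME_FNNAME_ZU(libexampleZdsoZd1, get_counter)(void)
{
    long   result;
    OrigFn fn;
    VALGRIND_GET_ORIG_FN(fn);   /* fn.r2 holds the callee's TOC pointer here */
    CALL_FN_W_v(result, fn);    /* zero-argument call; r2 is swapped around it */
    return result;
}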
2620 # if defined( PLAT_arm_linux )
2623 # define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3", "r4", "r14"
2638 # define VALGRIND_ALIGN_STACK \
2641 "bic r4, r4, #7\n\t" \
2643 # define VALGRIND_RESTORE_STACK "mov sp, r10\n\t"
2648 # define CALL_FN_W_v( lval, orig ) \
2650 volatile OrigFn _orig = ( orig ); \
2651 volatile unsigned long _argvec[1]; \
2652 volatile unsigned long _res; \
2653 _argvec[0] = (unsigned long)_orig.nraddr; \
2654 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r4, [%1] \n\t" \
2655 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \
2657 : "0"( &_argvec[0] ) \
2658 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2659 lval = (__typeof__( lval ))_res; \
2662 # define CALL_FN_W_W( lval, orig, arg1 ) \
2664 volatile OrigFn _orig = ( orig ); \
2665 volatile unsigned long _argvec[2]; \
2666 volatile unsigned long _res; \
2667 _argvec[0] = (unsigned long)_orig.nraddr; \
2668 _argvec[1] = (unsigned long)( arg1 ); \
2669 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \
2670 "ldr r4, [%1] \n\t" \
2671 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \
2673 : "0"( &_argvec[0] ) \
2674 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2675 lval = (__typeof__( lval ))_res; \
2678 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
2680 volatile OrigFn _orig = ( orig ); \
2681 volatile unsigned long _argvec[3]; \
2682 volatile unsigned long _res; \
2683 _argvec[0] = (unsigned long)_orig.nraddr; \
2684 _argvec[1] = (unsigned long)( arg1 ); \
2685 _argvec[2] = (unsigned long)( arg2 ); \
2686 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \
2687 "ldr r1, [%1, #8] \n\t" \
2688 "ldr r4, [%1] \n\t" \
2689 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \
2691 : "0"( &_argvec[0] ) \
2692 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2693 lval = (__typeof__( lval ))_res; \
2696 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
2698 volatile OrigFn _orig = ( orig ); \
2699 volatile unsigned long _argvec[4]; \
2700 volatile unsigned long _res; \
2701 _argvec[0] = (unsigned long)_orig.nraddr; \
2702 _argvec[1] = (unsigned long)( arg1 ); \
2703 _argvec[2] = (unsigned long)( arg2 ); \
2704 _argvec[3] = (unsigned long)( arg3 ); \
2705 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \
2706 "ldr r1, [%1, #8] \n\t" \
2707 "ldr r2, [%1, #12] \n\t" \
2708 "ldr r4, [%1] \n\t" \
2709 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \
2711 : "0"( &_argvec[0] ) \
2712 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2713 lval = (__typeof__( lval ))_res; \
2716 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
2718 volatile OrigFn _orig = ( orig ); \
2719 volatile unsigned long _argvec[5]; \
2720 volatile unsigned long _res; \
2721 _argvec[0] = (unsigned long)_orig.nraddr; \
2722 _argvec[1] = (unsigned long)( arg1 ); \
2723 _argvec[2] = (unsigned long)( arg2 ); \
2724 _argvec[3] = (unsigned long)( arg3 ); \
2725 _argvec[4] = (unsigned long)( arg4 ); \
2726 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \
2727 "ldr r1, [%1, #8] \n\t" \
2728 "ldr r2, [%1, #12] \n\t" \
2729 "ldr r3, [%1, #16] \n\t" \
2730 "ldr r4, [%1] \n\t" \
2731 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2733 : "0"( &_argvec[0] ) \
2734 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2735 lval = (__typeof__( lval ))_res; \
2738 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
2740 volatile OrigFn _orig = ( orig ); \
2741 volatile unsigned long _argvec[6]; \
2742 volatile unsigned long _res; \
2743 _argvec[0] = (unsigned long)_orig.nraddr; \
2744 _argvec[1] = (unsigned long)( arg1 ); \
2745 _argvec[2] = (unsigned long)( arg2 ); \
2746 _argvec[3] = (unsigned long)( arg3 ); \
2747 _argvec[4] = (unsigned long)( arg4 ); \
2748 _argvec[5] = (unsigned long)( arg5 ); \
2749 __asm__ volatile( VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \
2750 "ldr r0, [%1, #20] \n\t" \
2752 "ldr r0, [%1, #4] \n\t" \
2753 "ldr r1, [%1, #8] \n\t" \
2754 "ldr r2, [%1, #12] \n\t" \
2755 "ldr r3, [%1, #16] \n\t" \
2756 "ldr r4, [%1] \n\t" \
2757 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2759 : "0"( &_argvec[0] ) \
2760 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2761 lval = (__typeof__( lval ))_res; \
2764 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
2766 volatile OrigFn _orig = ( orig ); \
2767 volatile unsigned long _argvec[7]; \
2768 volatile unsigned long _res; \
2769 _argvec[0] = (unsigned long)_orig.nraddr; \
2770 _argvec[1] = (unsigned long)( arg1 ); \
2771 _argvec[2] = (unsigned long)( arg2 ); \
2772 _argvec[3] = (unsigned long)( arg3 ); \
2773 _argvec[4] = (unsigned long)( arg4 ); \
2774 _argvec[5] = (unsigned long)( arg5 ); \
2775 _argvec[6] = (unsigned long)( arg6 ); \
2776 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #20] \n\t" \
2777 "ldr r1, [%1, #24] \n\t" \
2778 "push {r0, r1} \n\t" \
2779 "ldr r0, [%1, #4] \n\t" \
2780 "ldr r1, [%1, #8] \n\t" \
2781 "ldr r2, [%1, #12] \n\t" \
2782 "ldr r3, [%1, #16] \n\t" \
2783 "ldr r4, [%1] \n\t" \
2784 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2786 : "0"( &_argvec[0] ) \
2787 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2788 lval = (__typeof__( lval ))_res; \
2791 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
2793 volatile OrigFn _orig = ( orig ); \
2794 volatile unsigned long _argvec[8]; \
2795 volatile unsigned long _res; \
2796 _argvec[0] = (unsigned long)_orig.nraddr; \
2797 _argvec[1] = (unsigned long)( arg1 ); \
2798 _argvec[2] = (unsigned long)( arg2 ); \
2799 _argvec[3] = (unsigned long)( arg3 ); \
2800 _argvec[4] = (unsigned long)( arg4 ); \
2801 _argvec[5] = (unsigned long)( arg5 ); \
2802 _argvec[6] = (unsigned long)( arg6 ); \
2803 _argvec[7] = (unsigned long)( arg7 ); \
2804 __asm__ volatile( VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \
2805 "ldr r0, [%1, #20] \n\t" \
2806 "ldr r1, [%1, #24] \n\t" \
2807 "ldr r2, [%1, #28] \n\t" \
2808 "push {r0, r1, r2} \n\t" \
2809 "ldr r0, [%1, #4] \n\t" \
2810 "ldr r1, [%1, #8] \n\t" \
2811 "ldr r2, [%1, #12] \n\t" \
2812 "ldr r3, [%1, #16] \n\t" \
2813 "ldr r4, [%1] \n\t" \
2814 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2816 : "0"( &_argvec[0] ) \
2817 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2818 lval = (__typeof__( lval ))_res; \
2821 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
2823 volatile OrigFn _orig = ( orig ); \
2824 volatile unsigned long _argvec[9]; \
2825 volatile unsigned long _res; \
2826 _argvec[0] = (unsigned long)_orig.nraddr; \
2827 _argvec[1] = (unsigned long)( arg1 ); \
2828 _argvec[2] = (unsigned long)( arg2 ); \
2829 _argvec[3] = (unsigned long)( arg3 ); \
2830 _argvec[4] = (unsigned long)( arg4 ); \
2831 _argvec[5] = (unsigned long)( arg5 ); \
2832 _argvec[6] = (unsigned long)( arg6 ); \
2833 _argvec[7] = (unsigned long)( arg7 ); \
2834 _argvec[8] = (unsigned long)( arg8 ); \
2835 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #20] \n\t" \
2836 "ldr r1, [%1, #24] \n\t" \
2837 "ldr r2, [%1, #28] \n\t" \
2838 "ldr r3, [%1, #32] \n\t" \
2839 "push {r0, r1, r2, r3} \n\t" \
2840 "ldr r0, [%1, #4] \n\t" \
2841 "ldr r1, [%1, #8] \n\t" \
2842 "ldr r2, [%1, #12] \n\t" \
2843 "ldr r3, [%1, #16] \n\t" \
2844 "ldr r4, [%1] \n\t" \
2845 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2847 : "0"( &_argvec[0] ) \
2848 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2849 lval = (__typeof__( lval ))_res; \
2852 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
2854 volatile OrigFn _orig = ( orig ); \
2855 volatile unsigned long _argvec[10]; \
2856 volatile unsigned long _res; \
2857 _argvec[0] = (unsigned long)_orig.nraddr; \
2858 _argvec[1] = (unsigned long)( arg1 ); \
2859 _argvec[2] = (unsigned long)( arg2 ); \
2860 _argvec[3] = (unsigned long)( arg3 ); \
2861 _argvec[4] = (unsigned long)( arg4 ); \
2862 _argvec[5] = (unsigned long)( arg5 ); \
2863 _argvec[6] = (unsigned long)( arg6 ); \
2864 _argvec[7] = (unsigned long)( arg7 ); \
2865 _argvec[8] = (unsigned long)( arg8 ); \
2866 _argvec[9] = (unsigned long)( arg9 ); \
2867 __asm__ volatile( VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \
2868 "ldr r0, [%1, #20] \n\t" \
2869 "ldr r1, [%1, #24] \n\t" \
2870 "ldr r2, [%1, #28] \n\t" \
2871 "ldr r3, [%1, #32] \n\t" \
2872 "ldr r4, [%1, #36] \n\t" \
2873 "push {r0, r1, r2, r3, r4} \n\t" \
2874 "ldr r0, [%1, #4] \n\t" \
2875 "ldr r1, [%1, #8] \n\t" \
2876 "ldr r2, [%1, #12] \n\t" \
2877 "ldr r3, [%1, #16] \n\t" \
2878 "ldr r4, [%1] \n\t" \
2879 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2881 : "0"( &_argvec[0] ) \
2882 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2883 lval = (__typeof__( lval ))_res; \
2886 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
2888 volatile OrigFn _orig = ( orig ); \
2889 volatile unsigned long _argvec[11]; \
2890 volatile unsigned long _res; \
2891 _argvec[0] = (unsigned long)_orig.nraddr; \
2892 _argvec[1] = (unsigned long)( arg1 ); \
2893 _argvec[2] = (unsigned long)( arg2 ); \
2894 _argvec[3] = (unsigned long)( arg3 ); \
2895 _argvec[4] = (unsigned long)( arg4 ); \
2896 _argvec[5] = (unsigned long)( arg5 ); \
2897 _argvec[6] = (unsigned long)( arg6 ); \
2898 _argvec[7] = (unsigned long)( arg7 ); \
2899 _argvec[8] = (unsigned long)( arg8 ); \
2900 _argvec[9] = (unsigned long)( arg9 ); \
2901 _argvec[10] = (unsigned long)( arg10 ); \
2902 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #40] \n\t" \
2904 "ldr r0, [%1, #20] \n\t" \
2905 "ldr r1, [%1, #24] \n\t" \
2906 "ldr r2, [%1, #28] \n\t" \
2907 "ldr r3, [%1, #32] \n\t" \
2908 "ldr r4, [%1, #36] \n\t" \
2909 "push {r0, r1, r2, r3, r4} \n\t" \
2910 "ldr r0, [%1, #4] \n\t" \
2911 "ldr r1, [%1, #8] \n\t" \
2912 "ldr r2, [%1, #12] \n\t" \
2913 "ldr r3, [%1, #16] \n\t" \
2914 "ldr r4, [%1] \n\t" \
2915 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2917 : "0"( &_argvec[0] ) \
2918 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2919 lval = (__typeof__( lval ))_res; \
2922 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
2924 volatile OrigFn _orig = ( orig ); \
2925 volatile unsigned long _argvec[12]; \
2926 volatile unsigned long _res; \
2927 _argvec[0] = (unsigned long)_orig.nraddr; \
2928 _argvec[1] = (unsigned long)( arg1 ); \
2929 _argvec[2] = (unsigned long)( arg2 ); \
2930 _argvec[3] = (unsigned long)( arg3 ); \
2931 _argvec[4] = (unsigned long)( arg4 ); \
2932 _argvec[5] = (unsigned long)( arg5 ); \
2933 _argvec[6] = (unsigned long)( arg6 ); \
2934 _argvec[7] = (unsigned long)( arg7 ); \
2935 _argvec[8] = (unsigned long)( arg8 ); \
2936 _argvec[9] = (unsigned long)( arg9 ); \
2937 _argvec[10] = (unsigned long)( arg10 ); \
2938 _argvec[11] = (unsigned long)( arg11 ); \
2939 __asm__ volatile( VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \
2940 "ldr r0, [%1, #40] \n\t" \
2941 "ldr r1, [%1, #44] \n\t" \
2942 "push {r0, r1} \n\t" \
2943 "ldr r0, [%1, #20] \n\t" \
2944 "ldr r1, [%1, #24] \n\t" \
2945 "ldr r2, [%1, #28] \n\t" \
2946 "ldr r3, [%1, #32] \n\t" \
2947 "ldr r4, [%1, #36] \n\t" \
2948 "push {r0, r1, r2, r3, r4} \n\t" \
2949 "ldr r0, [%1, #4] \n\t" \
2950 "ldr r1, [%1, #8] \n\t" \
2951 "ldr r2, [%1, #12] \n\t" \
2952 "ldr r3, [%1, #16] \n\t" \
2953 "ldr r4, [%1] \n\t" \
2954 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2956 : "0"( &_argvec[0] ) \
2957 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2958 lval = (__typeof__( lval ))_res; \
2961 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
2963 volatile OrigFn _orig = ( orig ); \
2964 volatile unsigned long _argvec[13]; \
2965 volatile unsigned long _res; \
2966 _argvec[0] = (unsigned long)_orig.nraddr; \
2967 _argvec[1] = (unsigned long)( arg1 ); \
2968 _argvec[2] = (unsigned long)( arg2 ); \
2969 _argvec[3] = (unsigned long)( arg3 ); \
2970 _argvec[4] = (unsigned long)( arg4 ); \
2971 _argvec[5] = (unsigned long)( arg5 ); \
2972 _argvec[6] = (unsigned long)( arg6 ); \
2973 _argvec[7] = (unsigned long)( arg7 ); \
2974 _argvec[8] = (unsigned long)( arg8 ); \
2975 _argvec[9] = (unsigned long)( arg9 ); \
2976 _argvec[10] = (unsigned long)( arg10 ); \
2977 _argvec[11] = (unsigned long)( arg11 ); \
2978 _argvec[12] = (unsigned long)( arg12 ); \
2979 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #40] \n\t" \
2980 "ldr r1, [%1, #44] \n\t" \
2981 "ldr r2, [%1, #48] \n\t" \
2982 "push {r0, r1, r2} \n\t" \
2983 "ldr r0, [%1, #20] \n\t" \
2984 "ldr r1, [%1, #24] \n\t" \
2985 "ldr r2, [%1, #28] \n\t" \
2986 "ldr r3, [%1, #32] \n\t" \
2987 "ldr r4, [%1, #36] \n\t" \
2988 "push {r0, r1, r2, r3, r4} \n\t" \
2989 "ldr r0, [%1, #4] \n\t" \
2990 "ldr r1, [%1, #8] \n\t" \
2991 "ldr r2, [%1, #12] \n\t" \
2992 "ldr r3, [%1, #16] \n\t" \
2993 "ldr r4, [%1] \n\t" \
2994 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2996 : "0"( &_argvec[0] ) \
2997 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2998 lval = (__typeof__( lval ))_res; \
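In the arm-linux block above, the first four words go in r0-r3, the branch target travels in r4 for VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4, and anything beyond four words is pushed onto the stack after VALGRIND_ALIGN_STACK has saved sp in r10 and 8-byte-aligned it. A sketch for a six-argument call (mix6 and its signature are illustrative only):

#include "valgrind.h"

/* Hypothetical 6-argument function in the main executable. */
long I_WRAP_SONAME_FNNAME_ZU(NONE, mix6)(long a, long b, long c,
                                         long d, long e, long f)
{
    long   result;
    OrigFn fn;
    VALGRIND_GET_ORIG_FN(fn);
    /* a..d travel in r0-r3; the macro itself pushes e and f onto the
       realigned stack, so the wrapper just forwards all six values. */
    CALL_FN_W_6W(result, fn, a, b, c, d, e, f);
    return result;
}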
3005 # if defined( PLAT_s390x_linux )
3011 # if defined( __GNUC__ ) && defined( __GCC_HAVE_DWARF2_CFI_ASM )
3012 # define __FRAME_POINTER , "d"( __builtin_dwarf_cfa() )
3013 # define VALGRIND_CFI_PROLOGUE \
3014 ".cfi_remember_state\n\t" \
3018 ".cfi_def_cfa r11, 0\n\t"
3019 # define VALGRIND_CFI_EPILOGUE \
3021 ".cfi_restore_state\n\t"
3023 # define __FRAME_POINTER
3024 # define VALGRIND_CFI_PROLOGUE "lgr 1,%1\n\t"
3025 # define VALGRIND_CFI_EPILOGUE
3037 # define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
3048 # define CALL_FN_W_v( lval, orig ) \
3050 volatile OrigFn _orig = ( orig ); \
3051 volatile unsigned long _argvec[1]; \
3052 volatile unsigned long _res; \
3053 _argvec[0] = (unsigned long)_orig.nraddr; \
3054 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3056 VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3057 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3059 : "d"(&_argvec[0])__FRAME_POINTER \
3060 : "cc", "memory", __CALLER_SAVED_REGS, "7" ); \
3061 lval = (__typeof__( lval ))_res; \
3065 # define CALL_FN_W_W( lval, orig, arg1 ) \
3067 volatile OrigFn _orig = ( orig ); \
3068 volatile unsigned long _argvec[2]; \
3069 volatile unsigned long _res; \
3070 _argvec[0] = (unsigned long)_orig.nraddr; \
3071 _argvec[1] = (unsigned long)arg1; \
3072 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3074 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3075 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3077 : "a"(&_argvec[0])__FRAME_POINTER \
3078 : "cc", "memory", __CALLER_SAVED_REGS, "7" ); \
3079 lval = (__typeof__( lval ))_res; \
3082 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
3084 volatile OrigFn _orig = ( orig ); \
3085 volatile unsigned long _argvec[3]; \
3086 volatile unsigned long _res; \
3087 _argvec[0] = (unsigned long)_orig.nraddr; \
3088 _argvec[1] = (unsigned long)arg1; \
3089 _argvec[2] = (unsigned long)arg2; \
3090 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3093 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3094 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3096 : "a"(&_argvec[0])__FRAME_POINTER \
3097 : "cc", "memory", __CALLER_SAVED_REGS, "7" ); \
3098 lval = (__typeof__( lval ))_res; \
3101 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
3103 volatile OrigFn _orig = ( orig ); \
3104 volatile unsigned long _argvec[4]; \
3105 volatile unsigned long _res; \
3106 _argvec[0] = (unsigned long)_orig.nraddr; \
3107 _argvec[1] = (unsigned long)arg1; \
3108 _argvec[2] = (unsigned long)arg2; \
3109 _argvec[3] = (unsigned long)arg3; \
3110 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3114 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3115 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3117 : "a"(&_argvec[0])__FRAME_POINTER \
3118 : "cc", "memory", __CALLER_SAVED_REGS, "7" ); \
3119 lval = (__typeof__( lval ))_res; \
3122 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
3124 volatile OrigFn _orig = ( orig ); \
3125 volatile unsigned long _argvec[5]; \
3126 volatile unsigned long _res; \
3127 _argvec[0] = (unsigned long)_orig.nraddr; \
3128 _argvec[1] = (unsigned long)arg1; \
3129 _argvec[2] = (unsigned long)arg2; \
3130 _argvec[3] = (unsigned long)arg3; \
3131 _argvec[4] = (unsigned long)arg4; \
3132 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3137 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3138 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3140 : "a"(&_argvec[0])__FRAME_POINTER \
3141 : "cc", "memory", __CALLER_SAVED_REGS, "7" ); \
3142 lval = (__typeof__( lval ))_res; \
3145 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
3147 volatile OrigFn _orig = ( orig ); \
3148 volatile unsigned long _argvec[6]; \
3149 volatile unsigned long _res; \
3150 _argvec[0] = (unsigned long)_orig.nraddr; \
3151 _argvec[1] = (unsigned long)arg1; \
3152 _argvec[2] = (unsigned long)arg2; \
3153 _argvec[3] = (unsigned long)arg3; \
3154 _argvec[4] = (unsigned long)arg4; \
3155 _argvec[5] = (unsigned long)arg5; \
3156 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3162 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3163 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3165 : "a"(&_argvec[0])__FRAME_POINTER \
3166 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3167 lval = (__typeof__( lval ))_res; \
3170 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
3172 volatile OrigFn _orig = ( orig ); \
3173 volatile unsigned long _argvec[7]; \
3174 volatile unsigned long _res; \
3175 _argvec[0] = (unsigned long)_orig.nraddr; \
3176 _argvec[1] = (unsigned long)arg1; \
3177 _argvec[2] = (unsigned long)arg2; \
3178 _argvec[3] = (unsigned long)arg3; \
3179 _argvec[4] = (unsigned long)arg4; \
3180 _argvec[5] = (unsigned long)arg5; \
3181 _argvec[6] = (unsigned long)arg6; \
3182 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-168\n\t" \
3188 "mvc 160(8,15), 48(1)\n\t" \
3189 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3190 "aghi 15,168\n\t" VALGRIND_CFI_EPILOGUE \
3192 : "a"(&_argvec[0])__FRAME_POINTER \
3193 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3194 lval = (__typeof__( lval ))_res; \
3197 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
3199 volatile OrigFn _orig = ( orig ); \
3200 volatile unsigned long _argvec[8]; \
3201 volatile unsigned long _res; \
3202 _argvec[0] = (unsigned long)_orig.nraddr; \
3203 _argvec[1] = (unsigned long)arg1; \
3204 _argvec[2] = (unsigned long)arg2; \
3205 _argvec[3] = (unsigned long)arg3; \
3206 _argvec[4] = (unsigned long)arg4; \
3207 _argvec[5] = (unsigned long)arg5; \
3208 _argvec[6] = (unsigned long)arg6; \
3209 _argvec[7] = (unsigned long)arg7; \
3210 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-176\n\t" \
3216 "mvc 160(8,15), 48(1)\n\t" \
3217 "mvc 168(8,15), 56(1)\n\t" \
3218 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3219 "aghi 15,176\n\t" VALGRIND_CFI_EPILOGUE \
3221 : "a"(&_argvec[0])__FRAME_POINTER \
3222 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3223 lval = (__typeof__( lval ))_res; \
3226 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
3228 volatile OrigFn _orig = ( orig ); \
3229 volatile unsigned long _argvec[9]; \
3230 volatile unsigned long _res; \
3231 _argvec[0] = (unsigned long)_orig.nraddr; \
3232 _argvec[1] = (unsigned long)arg1; \
3233 _argvec[2] = (unsigned long)arg2; \
3234 _argvec[3] = (unsigned long)arg3; \
3235 _argvec[4] = (unsigned long)arg4; \
3236 _argvec[5] = (unsigned long)arg5; \
3237 _argvec[6] = (unsigned long)arg6; \
3238 _argvec[7] = (unsigned long)arg7; \
3239 _argvec[8] = (unsigned long)arg8; \
3240 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-184\n\t" \
3246 "mvc 160(8,15), 48(1)\n\t" \
3247 "mvc 168(8,15), 56(1)\n\t" \
3248 "mvc 176(8,15), 64(1)\n\t" \
3249 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3250 "aghi 15,184\n\t" VALGRIND_CFI_EPILOGUE \
3252 : "a"(&_argvec[0])__FRAME_POINTER \
3253 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3254 lval = (__typeof__( lval ))_res; \
3257 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
3259 volatile OrigFn _orig = ( orig ); \
3260 volatile unsigned long _argvec[10]; \
3261 volatile unsigned long _res; \
3262 _argvec[0] = (unsigned long)_orig.nraddr; \
3263 _argvec[1] = (unsigned long)arg1; \
3264 _argvec[2] = (unsigned long)arg2; \
3265 _argvec[3] = (unsigned long)arg3; \
3266 _argvec[4] = (unsigned long)arg4; \
3267 _argvec[5] = (unsigned long)arg5; \
3268 _argvec[6] = (unsigned long)arg6; \
3269 _argvec[7] = (unsigned long)arg7; \
3270 _argvec[8] = (unsigned long)arg8; \
3271 _argvec[9] = (unsigned long)arg9; \
3272 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-192\n\t" \
3278 "mvc 160(8,15), 48(1)\n\t" \
3279 "mvc 168(8,15), 56(1)\n\t" \
3280 "mvc 176(8,15), 64(1)\n\t" \
3281 "mvc 184(8,15), 72(1)\n\t" \
3282 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3283 "aghi 15,192\n\t" VALGRIND_CFI_EPILOGUE \
3285 : "a"(&_argvec[0])__FRAME_POINTER \
3286 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3287 lval = (__typeof__( lval ))_res; \
3290 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
3292 volatile OrigFn _orig = ( orig ); \
3293 volatile unsigned long _argvec[11]; \
3294 volatile unsigned long _res; \
3295 _argvec[0] = (unsigned long)_orig.nraddr; \
3296 _argvec[1] = (unsigned long)arg1; \
3297 _argvec[2] = (unsigned long)arg2; \
3298 _argvec[3] = (unsigned long)arg3; \
3299 _argvec[4] = (unsigned long)arg4; \
3300 _argvec[5] = (unsigned long)arg5; \
3301 _argvec[6] = (unsigned long)arg6; \
3302 _argvec[7] = (unsigned long)arg7; \
3303 _argvec[8] = (unsigned long)arg8; \
3304 _argvec[9] = (unsigned long)arg9; \
3305 _argvec[10] = (unsigned long)arg10; \
3306 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-200\n\t" \
3312 "mvc 160(8,15), 48(1)\n\t" \
3313 "mvc 168(8,15), 56(1)\n\t" \
3314 "mvc 176(8,15), 64(1)\n\t" \
3315 "mvc 184(8,15), 72(1)\n\t" \
3316 "mvc 192(8,15), 80(1)\n\t" \
3317 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3318 "aghi 15,200\n\t" VALGRIND_CFI_EPILOGUE \
3320 : "a"(&_argvec[0])__FRAME_POINTER \
3321 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3322 lval = (__typeof__( lval ))_res; \
3325 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
3327 volatile OrigFn _orig = ( orig ); \
3328 volatile unsigned long _argvec[12]; \
3329 volatile unsigned long _res; \
3330 _argvec[0] = (unsigned long)_orig.nraddr; \
3331 _argvec[1] = (unsigned long)arg1; \
3332 _argvec[2] = (unsigned long)arg2; \
3333 _argvec[3] = (unsigned long)arg3; \
3334 _argvec[4] = (unsigned long)arg4; \
3335 _argvec[5] = (unsigned long)arg5; \
3336 _argvec[6] = (unsigned long)arg6; \
3337 _argvec[7] = (unsigned long)arg7; \
3338 _argvec[8] = (unsigned long)arg8; \
3339 _argvec[9] = (unsigned long)arg9; \
3340 _argvec[10] = (unsigned long)arg10; \
3341 _argvec[11] = (unsigned long)arg11; \
3342 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-208\n\t" \
3348 "mvc 160(8,15), 48(1)\n\t" \
3349 "mvc 168(8,15), 56(1)\n\t" \
3350 "mvc 176(8,15), 64(1)\n\t" \
3351 "mvc 184(8,15), 72(1)\n\t" \
3352 "mvc 192(8,15), 80(1)\n\t" \
3353 "mvc 200(8,15), 88(1)\n\t" \
3354 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3355 "aghi 15,208\n\t" VALGRIND_CFI_EPILOGUE \
3357 : "a"(&_argvec[0])__FRAME_POINTER \
3358 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3359 lval = (__typeof__( lval ))_res; \
3362 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
3364 volatile OrigFn _orig = ( orig ); \
3365 volatile unsigned long _argvec[13]; \
3366 volatile unsigned long _res; \
3367 _argvec[0] = (unsigned long)_orig.nraddr; \
3368 _argvec[1] = (unsigned long)arg1; \
3369 _argvec[2] = (unsigned long)arg2; \
3370 _argvec[3] = (unsigned long)arg3; \
3371 _argvec[4] = (unsigned long)arg4; \
3372 _argvec[5] = (unsigned long)arg5; \
3373 _argvec[6] = (unsigned long)arg6; \
3374 _argvec[7] = (unsigned long)arg7; \
3375 _argvec[8] = (unsigned long)arg8; \
3376 _argvec[9] = (unsigned long)arg9; \
3377 _argvec[10] = (unsigned long)arg10; \
3378 _argvec[11] = (unsigned long)arg11; \
3379 _argvec[12] = (unsigned long)arg12; \
3380 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-216\n\t" \
3386 "mvc 160(8,15), 48(1)\n\t" \
3387 "mvc 168(8,15), 56(1)\n\t" \
3388 "mvc 176(8,15), 64(1)\n\t" \
3389 "mvc 184(8,15), 72(1)\n\t" \
3390 "mvc 192(8,15), 80(1)\n\t" \
3391 "mvc 200(8,15), 88(1)\n\t" \
3392 "mvc 208(8,15), 96(1)\n\t" \
3393 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3394 "aghi 15,216\n\t" VALGRIND_CFI_EPILOGUE \
3396 : "a"(&_argvec[0])__FRAME_POINTER \
3397 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3398 lval = (__typeof__( lval ))_res; \
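The s390x-linux block above passes the first five word arguments in r2-r6 and copies any further arguments onto the new stack frame just past the ABI-mandated 160-byte save area (the "mvc 160(8,15),..." stores), emitting CFI directives around the call when __GCC_HAVE_DWARF2_CFI_ASM is available. A sketch with two stack-passed arguments (combine7 is a made-up name):

#include "valgrind.h"

/* Hypothetical 7-argument function in the main executable. */
long I_WRAP_SONAME_FNNAME_ZU(NONE, combine7)(long a, long b, long c, long d,
                                             long e, long f, long g)
{
    long   result;
    OrigFn fn;
    VALGRIND_GET_ORIG_FN(fn);
    /* r2-r6 take the first five words; the macro copies f and g onto the
       stack before branching via r1, and the result comes back in r2. */
    CALL_FN_W_7W(result, fn, a, b, c, d, e, f, g);
    return result;
}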
3405 # if defined( PLAT_mips32_linux )
3408 # define __CALLER_SAVED_REGS \
3409 "$2", "$3", "$4", "$5", "$6", "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", "$25", "$31"
3414 # define CALL_FN_W_v( lval, orig ) \
3416 volatile OrigFn _orig = ( orig ); \
3417 volatile unsigned long _argvec[1]; \
3418 volatile unsigned long _res; \
3419 _argvec[0] = (unsigned long)_orig.nraddr; \
3420 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3421 "sw $28, 0($29) \n\t" \
3422 "sw $31, 4($29) \n\t" \
3423 "subu $29, $29, 16 \n\t" \
3424 "lw $25, 0(%1) \n\t" \
3425 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16\n\t" \
3426 "lw $28, 0($29) \n\t" \
3427 "lw $31, 4($29) \n\t" \
3428 "addu $29, $29, 8 \n\t" \
3431 : "0"( &_argvec[0] ) \
3432 : "memory", __CALLER_SAVED_REGS ); \
3433 lval = (__typeof__( lval ))_res; \
3436 # define CALL_FN_W_W( lval, orig, arg1 ) \
3438 volatile OrigFn _orig = ( orig ); \
3439 volatile unsigned long _argvec[2]; \
3440 volatile unsigned long _res; \
3441 _argvec[0] = (unsigned long)_orig.nraddr; \
3442 _argvec[1] = (unsigned long)( arg1 ); \
3443 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3444 "sw $28, 0($29) \n\t" \
3445 "sw $31, 4($29) \n\t" \
3446 "subu $29, $29, 16 \n\t" \
3447 "lw $4, 4(%1) \n\t" \
3448 "lw $25, 0(%1) \n\t" \
3449 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \
3450 "lw $28, 0($29) \n\t" \
3451 "lw $31, 4($29) \n\t" \
3452 "addu $29, $29, 8 \n\t" \
3455 : "0"( &_argvec[0] ) \
3456 : "memory", __CALLER_SAVED_REGS ); \
3457 lval = (__typeof__( lval ))_res; \
3460 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
3462 volatile OrigFn _orig = ( orig ); \
3463 volatile unsigned long _argvec[3]; \
3464 volatile unsigned long _res; \
3465 _argvec[0] = (unsigned long)_orig.nraddr; \
3466 _argvec[1] = (unsigned long)( arg1 ); \
3467 _argvec[2] = (unsigned long)( arg2 ); \
3468 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3469 "sw $28, 0($29) \n\t" \
3470 "sw $31, 4($29) \n\t" \
3471 "subu $29, $29, 16 \n\t" \
3472 "lw $4, 4(%1) \n\t" \
3473 "lw $5, 8(%1) \n\t" \
3474 "lw $25, 0(%1) \n\t" \
3475 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \
3476 "lw $28, 0($29) \n\t" \
3477 "lw $31, 4($29) \n\t" \
3478 "addu $29, $29, 8 \n\t" \
3481 : "0"( &_argvec[0] ) \
3482 : "memory", __CALLER_SAVED_REGS ); \
3483 lval = (__typeof__( lval ))_res; \
3486 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
3488 volatile OrigFn _orig = ( orig ); \
3489 volatile unsigned long _argvec[4]; \
3490 volatile unsigned long _res; \
3491 _argvec[0] = (unsigned long)_orig.nraddr; \
3492 _argvec[1] = (unsigned long)( arg1 ); \
3493 _argvec[2] = (unsigned long)( arg2 ); \
3494 _argvec[3] = (unsigned long)( arg3 ); \
3495 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3496 "sw $28, 0($29) \n\t" \
3497 "sw $31, 4($29) \n\t" \
3498 "subu $29, $29, 16 \n\t" \
3499 "lw $4, 4(%1) \n\t" \
3500 "lw $5, 8(%1) \n\t" \
3501 "lw $6, 12(%1) \n\t" \
3502 "lw $25, 0(%1) \n\t" \
3503 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \
3504 "lw $28, 0($29) \n\t" \
3505 "lw $31, 4($29) \n\t" \
3506 "addu $29, $29, 8 \n\t" \
3509 : "0"( &_argvec[0] ) \
3510 : "memory", __CALLER_SAVED_REGS ); \
3511 lval = (__typeof__( lval ))_res; \
3514 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
3516 volatile OrigFn _orig = ( orig ); \
3517 volatile unsigned long _argvec[5]; \
3518 volatile unsigned long _res; \
3519 _argvec[0] = (unsigned long)_orig.nraddr; \
3520 _argvec[1] = (unsigned long)( arg1 ); \
3521 _argvec[2] = (unsigned long)( arg2 ); \
3522 _argvec[3] = (unsigned long)( arg3 ); \
3523 _argvec[4] = (unsigned long)( arg4 ); \
3524 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3525 "sw $28, 0($29) \n\t" \
3526 "sw $31, 4($29) \n\t" \
3527 "subu $29, $29, 16 \n\t" \
3528 "lw $4, 4(%1) \n\t" \
3529 "lw $5, 8(%1) \n\t" \
3530 "lw $6, 12(%1) \n\t" \
3531 "lw $7, 16(%1) \n\t" \
3532 "lw $25, 0(%1) \n\t" \
3533 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \
3534 "lw $28, 0($29) \n\t" \
3535 "lw $31, 4($29) \n\t" \
3536 "addu $29, $29, 8 \n\t" \
3539 : "0"( &_argvec[0] ) \
3540 : "memory", __CALLER_SAVED_REGS ); \
3541 lval = (__typeof__( lval ))_res; \
3544 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
3546 volatile OrigFn _orig = ( orig ); \
3547 volatile unsigned long _argvec[6]; \
3548 volatile unsigned long _res; \
3549 _argvec[0] = (unsigned long)_orig.nraddr; \
3550 _argvec[1] = (unsigned long)( arg1 ); \
3551 _argvec[2] = (unsigned long)( arg2 ); \
3552 _argvec[3] = (unsigned long)( arg3 ); \
3553 _argvec[4] = (unsigned long)( arg4 ); \
3554 _argvec[5] = (unsigned long)( arg5 ); \
3555 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3556 "sw $28, 0($29) \n\t" \
3557 "sw $31, 4($29) \n\t" \
3558 "lw $4, 20(%1) \n\t" \
3559 "subu $29, $29, 24\n\t" \
3560 "sw $4, 16($29) \n\t" \
3561 "lw $4, 4(%1) \n\t" \
3562 "lw $5, 8(%1) \n\t" \
3563 "lw $6, 12(%1) \n\t" \
3564 "lw $7, 16(%1) \n\t" \
3565 "lw $25, 0(%1) \n\t" \
3566 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 24 \n\t" \
3567 "lw $28, 0($29) \n\t" \
3568 "lw $31, 4($29) \n\t" \
3569 "addu $29, $29, 8 \n\t" \
3572 : "0"( &_argvec[0] ) \
3573 : "memory", __CALLER_SAVED_REGS ); \
3574 lval = (__typeof__( lval ))_res; \
3576 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
3578 volatile OrigFn _orig = ( orig ); \
3579 volatile unsigned long _argvec[7]; \
3580 volatile unsigned long _res; \
3581 _argvec[0] = (unsigned long)_orig.nraddr; \
3582 _argvec[1] = (unsigned long)( arg1 ); \
3583 _argvec[2] = (unsigned long)( arg2 ); \
3584 _argvec[3] = (unsigned long)( arg3 ); \
3585 _argvec[4] = (unsigned long)( arg4 ); \
3586 _argvec[5] = (unsigned long)( arg5 ); \
3587 _argvec[6] = (unsigned long)( arg6 ); \
3588 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3589 "sw $28, 0($29) \n\t" \
3590 "sw $31, 4($29) \n\t" \
3591 "lw $4, 20(%1) \n\t" \
3592 "subu $29, $29, 32\n\t" \
3593 "sw $4, 16($29) \n\t" \
3594 "lw $4, 24(%1) \n\t" \
3596 "sw $4, 20($29) \n\t" \
3597 "lw $4, 4(%1) \n\t" \
3598 "lw $5, 8(%1) \n\t" \
3599 "lw $6, 12(%1) \n\t" \
3600 "lw $7, 16(%1) \n\t" \
3601 "lw $25, 0(%1) \n\t" \
3602 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 32 \n\t" \
3603 "lw $28, 0($29) \n\t" \
3604 "lw $31, 4($29) \n\t" \
3605 "addu $29, $29, 8 \n\t" \
3608 : "0"( &_argvec[0] ) \
3609 : "memory", __CALLER_SAVED_REGS ); \
3610 lval = (__typeof__( lval ))_res; \
3613 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
3615 volatile OrigFn _orig = ( orig ); \
3616 volatile unsigned long _argvec[8]; \
3617 volatile unsigned long _res; \
3618 _argvec[0] = (unsigned long)_orig.nraddr; \
3619 _argvec[1] = (unsigned long)( arg1 ); \
3620 _argvec[2] = (unsigned long)( arg2 ); \
3621 _argvec[3] = (unsigned long)( arg3 ); \
3622 _argvec[4] = (unsigned long)( arg4 ); \
3623 _argvec[5] = (unsigned long)( arg5 ); \
3624 _argvec[6] = (unsigned long)( arg6 ); \
3625 _argvec[7] = (unsigned long)( arg7 ); \
3626 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3627 "sw $28, 0($29) \n\t" \
3628 "sw $31, 4($29) \n\t" \
3629 "lw $4, 20(%1) \n\t" \
3630 "subu $29, $29, 32\n\t" \
3631 "sw $4, 16($29) \n\t" \
3632 "lw $4, 24(%1) \n\t" \
3633 "sw $4, 20($29) \n\t" \
3634 "lw $4, 28(%1) \n\t" \
3635 "sw $4, 24($29) \n\t" \
3636 "lw $4, 4(%1) \n\t" \
3637 "lw $5, 8(%1) \n\t" \
3638 "lw $6, 12(%1) \n\t" \
3639 "lw $7, 16(%1) \n\t" \
3640 "lw $25, 0(%1) \n\t" \
3641 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 32 \n\t" \
3642 "lw $28, 0($29) \n\t" \
3643 "lw $31, 4($29) \n\t" \
3644 "addu $29, $29, 8 \n\t" \
3647 : "0"( &_argvec[0] ) \
3648 : "memory", __CALLER_SAVED_REGS ); \
3649 lval = (__typeof__( lval ))_res; \
3652 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
3654 volatile OrigFn _orig = ( orig ); \
3655 volatile unsigned long _argvec[9]; \
3656 volatile unsigned long _res; \
3657 _argvec[0] = (unsigned long)_orig.nraddr; \
3658 _argvec[1] = (unsigned long)( arg1 ); \
3659 _argvec[2] = (unsigned long)( arg2 ); \
3660 _argvec[3] = (unsigned long)( arg3 ); \
3661 _argvec[4] = (unsigned long)( arg4 ); \
3662 _argvec[5] = (unsigned long)( arg5 ); \
3663 _argvec[6] = (unsigned long)( arg6 ); \
3664 _argvec[7] = (unsigned long)( arg7 ); \
3665 _argvec[8] = (unsigned long)( arg8 ); \
3666 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3667 "sw $28, 0($29) \n\t" \
3668 "sw $31, 4($29) \n\t" \
3669 "lw $4, 20(%1) \n\t" \
3670 "subu $29, $29, 40\n\t" \
3671 "sw $4, 16($29) \n\t" \
3672 "lw $4, 24(%1) \n\t" \
3673 "sw $4, 20($29) \n\t" \
3674 "lw $4, 28(%1) \n\t" \
3675 "sw $4, 24($29) \n\t" \
3676 "lw $4, 32(%1) \n\t" \
3677 "sw $4, 28($29) \n\t" \
3678 "lw $4, 4(%1) \n\t" \
3679 "lw $5, 8(%1) \n\t" \
3680 "lw $6, 12(%1) \n\t" \
3681 "lw $7, 16(%1) \n\t" \
3682 "lw $25, 0(%1) \n\t" \
3683 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 40 \n\t" \
3684 "lw $28, 0($29) \n\t" \
3685 "lw $31, 4($29) \n\t" \
3686 "addu $29, $29, 8 \n\t" \
3689 : "0"( &_argvec[0] ) \
3690 : "memory", __CALLER_SAVED_REGS ); \
3691 lval = (__typeof__( lval ))_res; \
3694 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
3696 volatile OrigFn _orig = ( orig ); \
3697 volatile unsigned long _argvec[10]; \
3698 volatile unsigned long _res; \
3699 _argvec[0] = (unsigned long)_orig.nraddr; \
3700 _argvec[1] = (unsigned long)( arg1 ); \
3701 _argvec[2] = (unsigned long)( arg2 ); \
3702 _argvec[3] = (unsigned long)( arg3 ); \
3703 _argvec[4] = (unsigned long)( arg4 ); \
3704 _argvec[5] = (unsigned long)( arg5 ); \
3705 _argvec[6] = (unsigned long)( arg6 ); \
3706 _argvec[7] = (unsigned long)( arg7 ); \
3707 _argvec[8] = (unsigned long)( arg8 ); \
3708 _argvec[9] = (unsigned long)( arg9 ); \
3709 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3710 "sw $28, 0($29) \n\t" \
3711 "sw $31, 4($29) \n\t" \
3712 "lw $4, 20(%1) \n\t" \
3713 "subu $29, $29, 40\n\t" \
3714 "sw $4, 16($29) \n\t" \
3715 "lw $4, 24(%1) \n\t" \
3716 "sw $4, 20($29) \n\t" \
3717 "lw $4, 28(%1) \n\t" \
3718 "sw $4, 24($29) \n\t" \
3719 "lw $4, 32(%1) \n\t" \
3720 "sw $4, 28($29) \n\t" \
3721 "lw $4, 36(%1) \n\t" \
3722 "sw $4, 32($29) \n\t" \
3723 "lw $4, 4(%1) \n\t" \
3724 "lw $5, 8(%1) \n\t" \
3725 "lw $6, 12(%1) \n\t" \
3726 "lw $7, 16(%1) \n\t" \
3727 "lw $25, 0(%1) \n\t" \
3728 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 40 \n\t" \
3729 "lw $28, 0($29) \n\t" \
3730 "lw $31, 4($29) \n\t" \
3731 "addu $29, $29, 8 \n\t" \
3734 : "0"( &_argvec[0] ) \
3735 : "memory", __CALLER_SAVED_REGS ); \
3736 lval = (__typeof__( lval ))_res; \
3739 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
3741 volatile OrigFn _orig = ( orig ); \
3742 volatile unsigned long _argvec[11]; \
3743 volatile unsigned long _res; \
3744 _argvec[0] = (unsigned long)_orig.nraddr; \
3745 _argvec[1] = (unsigned long)( arg1 ); \
3746 _argvec[2] = (unsigned long)( arg2 ); \
3747 _argvec[3] = (unsigned long)( arg3 ); \
3748 _argvec[4] = (unsigned long)( arg4 ); \
3749 _argvec[5] = (unsigned long)( arg5 ); \
3750 _argvec[6] = (unsigned long)( arg6 ); \
3751 _argvec[7] = (unsigned long)( arg7 ); \
3752 _argvec[8] = (unsigned long)( arg8 ); \
3753 _argvec[9] = (unsigned long)( arg9 ); \
3754 _argvec[10] = (unsigned long)( arg10 ); \
3755 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3756 "sw $28, 0($29) \n\t" \
3757 "sw $31, 4($29) \n\t" \
3758 "lw $4, 20(%1) \n\t" \
3759 "subu $29, $29, 48\n\t" \
3760 "sw $4, 16($29) \n\t" \
3761 "lw $4, 24(%1) \n\t" \
3762 "sw $4, 20($29) \n\t" \
3763 "lw $4, 28(%1) \n\t" \
3764 "sw $4, 24($29) \n\t" \
3765 "lw $4, 32(%1) \n\t" \
3766 "sw $4, 28($29) \n\t" \
3767 "lw $4, 36(%1) \n\t" \
3768 "sw $4, 32($29) \n\t" \
3769 "lw $4, 40(%1) \n\t" \
3770 "sw $4, 36($29) \n\t" \
3771 "lw $4, 4(%1) \n\t" \
3772 "lw $5, 8(%1) \n\t" \
3773 "lw $6, 12(%1) \n\t" \
3774 "lw $7, 16(%1) \n\t" \
3775 "lw $25, 0(%1) \n\t" \
3776 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 48 \n\t" \
3777 "lw $28, 0($29) \n\t" \
3778 "lw $31, 4($29) \n\t" \
3779 "addu $29, $29, 8 \n\t" \
3782 : "0"( &_argvec[0] ) \
3783 : "memory", __CALLER_SAVED_REGS ); \
3784 lval = (__typeof__( lval ))_res; \
3787 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
3789 volatile OrigFn _orig = ( orig ); \
3790 volatile unsigned long _argvec[12]; \
3791 volatile unsigned long _res; \
3792 _argvec[0] = (unsigned long)_orig.nraddr; \
3793 _argvec[1] = (unsigned long)( arg1 ); \
3794 _argvec[2] = (unsigned long)( arg2 ); \
3795 _argvec[3] = (unsigned long)( arg3 ); \
3796 _argvec[4] = (unsigned long)( arg4 ); \
3797 _argvec[5] = (unsigned long)( arg5 ); \
3798 _argvec[6] = (unsigned long)( arg6 ); \
3799 _argvec[7] = (unsigned long)( arg7 ); \
3800 _argvec[8] = (unsigned long)( arg8 ); \
3801 _argvec[9] = (unsigned long)( arg9 ); \
3802 _argvec[10] = (unsigned long)( arg10 ); \
3803 _argvec[11] = (unsigned long)( arg11 ); \
3804 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3805 "sw $28, 0($29) \n\t" \
3806 "sw $31, 4($29) \n\t" \
3807 "lw $4, 20(%1) \n\t" \
3808 "subu $29, $29, 48\n\t" \
3809 "sw $4, 16($29) \n\t" \
3810 "lw $4, 24(%1) \n\t" \
3811 "sw $4, 20($29) \n\t" \
3812 "lw $4, 28(%1) \n\t" \
3813 "sw $4, 24($29) \n\t" \
3814 "lw $4, 32(%1) \n\t" \
3815 "sw $4, 28($29) \n\t" \
3816 "lw $4, 36(%1) \n\t" \
3817 "sw $4, 32($29) \n\t" \
3818 "lw $4, 40(%1) \n\t" \
3819 "sw $4, 36($29) \n\t" \
3820 "lw $4, 44(%1) \n\t" \
3821 "sw $4, 40($29) \n\t" \
3822 "lw $4, 4(%1) \n\t" \
3823 "lw $5, 8(%1) \n\t" \
3824 "lw $6, 12(%1) \n\t" \
3825 "lw $7, 16(%1) \n\t" \
3826 "lw $25, 0(%1) \n\t" \
3827 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 48 \n\t" \
3828 "lw $28, 0($29) \n\t" \
3829 "lw $31, 4($29) \n\t" \
3830 "addu $29, $29, 8 \n\t" \
3833 : "0"( &_argvec[0] ) \
3834 : "memory", __CALLER_SAVED_REGS ); \
3835 lval = (__typeof__( lval ))_res; \
3838 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
3840 volatile OrigFn _orig = ( orig ); \
3841 volatile unsigned long _argvec[13]; \
3842 volatile unsigned long _res; \
3843 _argvec[0] = (unsigned long)_orig.nraddr; \
3844 _argvec[1] = (unsigned long)( arg1 ); \
3845 _argvec[2] = (unsigned long)( arg2 ); \
3846 _argvec[3] = (unsigned long)( arg3 ); \
3847 _argvec[4] = (unsigned long)( arg4 ); \
3848 _argvec[5] = (unsigned long)( arg5 ); \
3849 _argvec[6] = (unsigned long)( arg6 ); \
3850 _argvec[7] = (unsigned long)( arg7 ); \
3851 _argvec[8] = (unsigned long)( arg8 ); \
3852 _argvec[9] = (unsigned long)( arg9 ); \
3853 _argvec[10] = (unsigned long)( arg10 ); \
3854 _argvec[11] = (unsigned long)( arg11 ); \
3855 _argvec[12] = (unsigned long)( arg12 ); \
3856 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3857 "sw $28, 0($29) \n\t" \
3858 "sw $31, 4($29) \n\t" \
3859 "lw $4, 20(%1) \n\t" \
3860 "subu $29, $29, 56\n\t" \
3861 "sw $4, 16($29) \n\t" \
3862 "lw $4, 24(%1) \n\t" \
3863 "sw $4, 20($29) \n\t" \
3864 "lw $4, 28(%1) \n\t" \
3865 "sw $4, 24($29) \n\t" \
3866 "lw $4, 32(%1) \n\t" \
3867 "sw $4, 28($29) \n\t" \
3868 "lw $4, 36(%1) \n\t" \
3869 "sw $4, 32($29) \n\t" \
3870 "lw $4, 40(%1) \n\t" \
3871 "sw $4, 36($29) \n\t" \
3872 "lw $4, 44(%1) \n\t" \
3873 "sw $4, 40($29) \n\t" \
3874 "lw $4, 48(%1) \n\t" \
3875 "sw $4, 44($29) \n\t" \
3876 "lw $4, 4(%1) \n\t" \
3877 "lw $5, 8(%1) \n\t" \
3878 "lw $6, 12(%1) \n\t" \
3879 "lw $7, 16(%1) \n\t" \
3880 "lw $25, 0(%1) \n\t" \
3881 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 56 \n\t" \
3882 "lw $28, 0($29) \n\t" \
3883 "lw $31, 4($29) \n\t" \
3884 "addu $29, $29, 8 \n\t" \
3887 : "r"( &_argvec[0] ) \
3888 : "memory", __CALLER_SAVED_REGS ); \
3889 lval = (__typeof__( lval ))_res; \
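The mips32-linux block above follows the o32 convention: the first four words go in $4-$7, later words are stored on the stack (arg5 at 16($29), and so on), $28 and $31 are saved and restored around the call, and the call itself goes through $25 (t9) via VALGRIND_CALL_NOREDIR_T9. A sketch with one stack-passed argument (sum5 and its signature are made up):

#include "valgrind.h"

/* Hypothetical 5-argument function in the main executable. */
int I_WRAP_SONAME_FNNAME_ZU(NONE, sum5)(int a, int b, int c, int d, int e)
{
    int    result;
    OrigFn fn;
    VALGRIND_GET_ORIG_FN(fn);
    /* a..d go in $4-$7; the macro stores e at 16($sp) itself before the
       call through $25, so the wrapper only forwards the values. */
    CALL_FN_W_5W(result, fn, a, b, c, d, e);
    return result;
}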
3896 # if defined( PLAT_mips64_linux )
3899 # define __CALLER_SAVED_REGS \
3900 "$2", "$3", "$4", "$5", "$6", "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", "$25", "$31"
3905 # define CALL_FN_W_v( lval, orig ) \
3907 volatile OrigFn _orig = ( orig ); \
3908 volatile unsigned long _argvec[1]; \
3909 volatile unsigned long _res; \
3910 _argvec[0] = (unsigned long)_orig.nraddr; \
3911 __asm__ volatile( "ld $25, 0(%1)\n\t" \
3912 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
3914 : "0"( &_argvec[0] ) \
3915 : "memory", __CALLER_SAVED_REGS ); \
3916 lval = (__typeof__( lval ))_res; \
3919 # define CALL_FN_W_W( lval, orig, arg1 ) \
3921 volatile OrigFn _orig = ( orig ); \
3922 volatile unsigned long _argvec[2]; \
3923 volatile unsigned long _res; \
3924 _argvec[0] = (unsigned long)_orig.nraddr; \
3925 _argvec[1] = (unsigned long)( arg1 ); \
3926 __asm__ volatile( "ld $4, 8(%1)\n\t" \
3927 "ld $25, 0(%1)\n\t" \
3928 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
3930 : "r"( &_argvec[0] ) \
3931 : "memory", __CALLER_SAVED_REGS ); \
3932 lval = (__typeof__( lval ))_res; \
3935 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
3937 volatile OrigFn _orig = ( orig ); \
3938 volatile unsigned long _argvec[3]; \
3939 volatile unsigned long _res; \
3940 _argvec[0] = (unsigned long)_orig.nraddr; \
3941 _argvec[1] = (unsigned long)( arg1 ); \
3942 _argvec[2] = (unsigned long)( arg2 ); \
3943 __asm__ volatile( "ld $4, 8(%1)\n\t" \
3944 "ld $5, 16(%1)\n\t" \
3945 "ld $25, 0(%1)\n\t" \
3946 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
3948 : "r"( &_argvec[0] ) \
3949 : "memory", __CALLER_SAVED_REGS ); \
3950 lval = (__typeof__( lval ))_res; \
3953 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
3955 volatile OrigFn _orig = ( orig ); \
3956 volatile unsigned long _argvec[4]; \
3957 volatile unsigned long _res; \
3958 _argvec[0] = (unsigned long)_orig.nraddr; \
3959 _argvec[1] = (unsigned long)( arg1 ); \
3960 _argvec[2] = (unsigned long)( arg2 ); \
3961 _argvec[3] = (unsigned long)( arg3 ); \
3962 __asm__ volatile( "ld $4, 8(%1)\n\t" \
3963 "ld $5, 16(%1)\n\t" \
3964 "ld $6, 24(%1)\n\t" \
3965 "ld $25, 0(%1)\n\t" \
3966 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
3968 : "r"( &_argvec[0] ) \
3969 : "memory", __CALLER_SAVED_REGS ); \
3970 lval = (__typeof__( lval ))_res; \
3973 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
3975 volatile OrigFn _orig = ( orig ); \
3976 volatile unsigned long _argvec[5]; \
3977 volatile unsigned long _res; \
3978 _argvec[0] = (unsigned long)_orig.nraddr; \
3979 _argvec[1] = (unsigned long)( arg1 ); \
3980 _argvec[2] = (unsigned long)( arg2 ); \
3981 _argvec[3] = (unsigned long)( arg3 ); \
3982 _argvec[4] = (unsigned long)( arg4 ); \
3983 __asm__ volatile( "ld $4, 8(%1)\n\t" \
3984 "ld $5, 16(%1)\n\t" \
3985 "ld $6, 24(%1)\n\t" \
3986 "ld $7, 32(%1)\n\t" \
3987 "ld $25, 0(%1)\n\t" \
3988 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
3990 : "r"( &_argvec[0] ) \
3991 : "memory", __CALLER_SAVED_REGS ); \
3992 lval = (__typeof__( lval ))_res; \
3995 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
3997 volatile OrigFn _orig = ( orig ); \
3998 volatile unsigned long _argvec[6]; \
3999 volatile unsigned long _res; \
4000 _argvec[0] = (unsigned long)_orig.nraddr; \
4001 _argvec[1] = (unsigned long)( arg1 ); \
4002 _argvec[2] = (unsigned long)( arg2 ); \
4003 _argvec[3] = (unsigned long)( arg3 ); \
4004 _argvec[4] = (unsigned long)( arg4 ); \
4005 _argvec[5] = (unsigned long)( arg5 ); \
4006 __asm__ volatile( "ld $4, 8(%1)\n\t" \
4007 "ld $5, 16(%1)\n\t" \
4008 "ld $6, 24(%1)\n\t" \
4009 "ld $7, 32(%1)\n\t" \
4010 "ld $8, 40(%1)\n\t" \
4011 "ld $25, 0(%1)\n\t" \
4012 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
4014 : "r"( &_argvec[0] ) \
4015 : "memory", __CALLER_SAVED_REGS ); \
4016 lval = (__typeof__( lval ))_res; \
4019 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
4021 volatile OrigFn _orig = ( orig ); \
4022 volatile unsigned long _argvec[7]; \
4023 volatile unsigned long _res; \
4024 _argvec[0] = (unsigned long)_orig.nraddr; \
4025 _argvec[1] = (unsigned long)( arg1 ); \
4026 _argvec[2] = (unsigned long)( arg2 ); \
4027 _argvec[3] = (unsigned long)( arg3 ); \
4028 _argvec[4] = (unsigned long)( arg4 ); \
4029 _argvec[5] = (unsigned long)( arg5 ); \
4030 _argvec[6] = (unsigned long)( arg6 ); \
4031 __asm__ volatile( "ld $4, 8(%1)\n\t" \
4032 "ld $5, 16(%1)\n\t" \
4033 "ld $6, 24(%1)\n\t" \
4034 "ld $7, 32(%1)\n\t" \
4035 "ld $8, 40(%1)\n\t" \
4036 "ld $9, 48(%1)\n\t" \
4037 "ld $25, 0(%1)\n\t" \
4038 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
4040 : "r"( &_argvec[0] ) \
4041 : "memory", __CALLER_SAVED_REGS ); \
4042 lval = (__typeof__( lval ))_res; \
4045 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
4047 volatile OrigFn _orig = ( orig ); \
4048 volatile unsigned long _argvec[8]; \
4049 volatile unsigned long _res; \
4050 _argvec[0] = (unsigned long)_orig.nraddr; \
4051 _argvec[1] = (unsigned long)( arg1 ); \
4052 _argvec[2] = (unsigned long)( arg2 ); \
4053 _argvec[3] = (unsigned long)( arg3 ); \
4054 _argvec[4] = (unsigned long)( arg4 ); \
4055 _argvec[5] = (unsigned long)( arg5 ); \
4056 _argvec[6] = (unsigned long)( arg6 ); \
4057 _argvec[7] = (unsigned long)( arg7 ); \
4058 __asm__ volatile( "ld $4, 8(%1)\n\t" \
4059 "ld $5, 16(%1)\n\t" \
4060 "ld $6, 24(%1)\n\t" \
4061 "ld $7, 32(%1)\n\t" \
4062 "ld $8, 40(%1)\n\t" \
4063 "ld $9, 48(%1)\n\t" \
4064 "ld $10, 56(%1)\n\t" \
4065 "ld $25, 0(%1) \n\t" \
4066 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
4068 : "r"( &_argvec[0] ) \
4069 : "memory", __CALLER_SAVED_REGS ); \
4070 lval = (__typeof__( lval ))_res; \
4073 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
4075 volatile OrigFn _orig = ( orig ); \
4076 volatile unsigned long _argvec[9]; \
4077 volatile unsigned long _res; \
4078 _argvec[0] = (unsigned long)_orig.nraddr; \
4079 _argvec[1] = (unsigned long)( arg1 ); \
4080 _argvec[2] = (unsigned long)( arg2 ); \
4081 _argvec[3] = (unsigned long)( arg3 ); \
4082 _argvec[4] = (unsigned long)( arg4 ); \
4083 _argvec[5] = (unsigned long)( arg5 ); \
4084 _argvec[6] = (unsigned long)( arg6 ); \
4085 _argvec[7] = (unsigned long)( arg7 ); \
4086 _argvec[8] = (unsigned long)( arg8 ); \
4087 __asm__ volatile( "ld $4, 8(%1)\n\t" \
4088 "ld $5, 16(%1)\n\t" \
4089 "ld $6, 24(%1)\n\t" \
4090 "ld $7, 32(%1)\n\t" \
4091 "ld $8, 40(%1)\n\t" \
4092 "ld $9, 48(%1)\n\t" \
4093 "ld $10, 56(%1)\n\t" \
4094 "ld $11, 64(%1)\n\t" \
4095 "ld $25, 0(%1) \n\t" \
4096 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
4098 : "r"( &_argvec[0] ) \
4099 : "memory", __CALLER_SAVED_REGS ); \
4100 lval = (__typeof__( lval ))_res; \
4103 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
4105 volatile OrigFn _orig = ( orig ); \
4106 volatile unsigned long _argvec[10]; \
4107 volatile unsigned long _res; \
4108 _argvec[0] = (unsigned long)_orig.nraddr; \
4109 _argvec[1] = (unsigned long)( arg1 ); \
4110 _argvec[2] = (unsigned long)( arg2 ); \
4111 _argvec[3] = (unsigned long)( arg3 ); \
4112 _argvec[4] = (unsigned long)( arg4 ); \
4113 _argvec[5] = (unsigned long)( arg5 ); \
4114 _argvec[6] = (unsigned long)( arg6 ); \
4115 _argvec[7] = (unsigned long)( arg7 ); \
4116 _argvec[8] = (unsigned long)( arg8 ); \
4117 _argvec[9] = (unsigned long)( arg9 ); \
4118 __asm__ volatile( "dsubu $29, $29, 8\n\t" \
4119 "ld $4, 72(%1)\n\t" \
4120 "sd $4, 0($29)\n\t" \
4121 "ld $4, 8(%1)\n\t" \
4122 "ld $5, 16(%1)\n\t" \
4123 "ld $6, 24(%1)\n\t" \
4124 "ld $7, 32(%1)\n\t" \
4125 "ld $8, 40(%1)\n\t" \
4126 "ld $9, 48(%1)\n\t" \
4127 "ld $10, 56(%1)\n\t" \
4128 "ld $11, 64(%1)\n\t" \
4129 "ld $25, 0(%1)\n\t" \
4130 VALGRIND_CALL_NOREDIR_T9 "daddu $29, $29, 8\n\t" \
4133 : "r"( &_argvec[0] ) \
4134 : "memory", __CALLER_SAVED_REGS ); \
4135 lval = (__typeof__( lval ))_res; \
4138 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
4140 volatile OrigFn _orig = ( orig ); \
4141 volatile unsigned long _argvec[11]; \
4142 volatile unsigned long _res; \
4143 _argvec[0] = (unsigned long)_orig.nraddr; \
4144 _argvec[1] = (unsigned long)( arg1 ); \
4145 _argvec[2] = (unsigned long)( arg2 ); \
4146 _argvec[3] = (unsigned long)( arg3 ); \
4147 _argvec[4] = (unsigned long)( arg4 ); \
4148 _argvec[5] = (unsigned long)( arg5 ); \
4149 _argvec[6] = (unsigned long)( arg6 ); \
4150 _argvec[7] = (unsigned long)( arg7 ); \
4151 _argvec[8] = (unsigned long)( arg8 ); \
4152 _argvec[9] = (unsigned long)( arg9 ); \
4153 _argvec[10] = (unsigned long)( arg10 ); \
4154 __asm__ volatile( "dsubu $29, $29, 16\n\t" \
4155 "ld $4, 72(%1)\n\t" \
4156 "sd $4, 0($29)\n\t" \
4157 "ld $4, 80(%1)\n\t" \
4158 "sd $4, 8($29)\n\t" \
4159 "ld $4, 8(%1)\n\t" \
4160 "ld $5, 16(%1)\n\t" \
4161 "ld $6, 24(%1)\n\t" \
4162 "ld $7, 32(%1)\n\t" \
4163 "ld $8, 40(%1)\n\t" \
4164 "ld $9, 48(%1)\n\t" \
4165 "ld $10, 56(%1)\n\t" \
4166 "ld $11, 64(%1)\n\t" \
4167 "ld $25, 0(%1)\n\t" \
4168 VALGRIND_CALL_NOREDIR_T9 "daddu $29, $29, 16\n\t" \
4171 : "r"( &_argvec[0] ) \
4172 : "memory", __CALLER_SAVED_REGS ); \
4173 lval = (__typeof__( lval ))_res; \
4176 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
4178 volatile OrigFn _orig = ( orig ); \
4179 volatile unsigned long _argvec[12]; \
4180 volatile unsigned long _res; \
4181 _argvec[0] = (unsigned long)_orig.nraddr; \
4182 _argvec[1] = (unsigned long)( arg1 ); \
4183 _argvec[2] = (unsigned long)( arg2 ); \
4184 _argvec[3] = (unsigned long)( arg3 ); \
4185 _argvec[4] = (unsigned long)( arg4 ); \
4186 _argvec[5] = (unsigned long)( arg5 ); \
4187 _argvec[6] = (unsigned long)( arg6 ); \
4188 _argvec[7] = (unsigned long)( arg7 ); \
4189 _argvec[8] = (unsigned long)( arg8 ); \
4190 _argvec[9] = (unsigned long)( arg9 ); \
4191 _argvec[10] = (unsigned long)( arg10 ); \
4192 _argvec[11] = (unsigned long)( arg11 ); \
4193 __asm__ volatile( "dsubu $29, $29, 24\n\t" \
4194 "ld $4, 72(%1)\n\t" \
4195 "sd $4, 0($29)\n\t" \
4196 "ld $4, 80(%1)\n\t" \
4197 "sd $4, 8($29)\n\t" \
4198 "ld $4, 88(%1)\n\t" \
4199 "sd $4, 16($29)\n\t" \
4200 "ld $4, 8(%1)\n\t" \
4201 "ld $5, 16(%1)\n\t" \
4202 "ld $6, 24(%1)\n\t" \
4203 "ld $7, 32(%1)\n\t" \
4204 "ld $8, 40(%1)\n\t" \
4205 "ld $9, 48(%1)\n\t" \
4206 "ld $10, 56(%1)\n\t" \
4207 "ld $11, 64(%1)\n\t" \
4208 "ld $25, 0(%1)\n\t" \
4209 VALGRIND_CALL_NOREDIR_T9 "daddu $29, $29, 24\n\t" \
4212 : "r"( &_argvec[0] ) \
4213 : "memory", __CALLER_SAVED_REGS ); \
4214 lval = (__typeof__( lval ))_res; \
4217 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
4219 volatile OrigFn _orig = ( orig ); \
4220 volatile unsigned long _argvec[13]; \
4221 volatile unsigned long _res; \
4222 _argvec[0] = (unsigned long)_orig.nraddr; \
4223 _argvec[1] = (unsigned long)( arg1 ); \
4224 _argvec[2] = (unsigned long)( arg2 ); \
4225 _argvec[3] = (unsigned long)( arg3 ); \
4226 _argvec[4] = (unsigned long)( arg4 ); \
4227 _argvec[5] = (unsigned long)( arg5 ); \
4228 _argvec[6] = (unsigned long)( arg6 ); \
4229 _argvec[7] = (unsigned long)( arg7 ); \
4230 _argvec[8] = (unsigned long)( arg8 ); \
4231 _argvec[9] = (unsigned long)( arg9 ); \
4232 _argvec[10] = (unsigned long)( arg10 ); \
4233 _argvec[11] = (unsigned long)( arg11 ); \
4234 _argvec[12] = (unsigned long)( arg12 ); \
4235 __asm__ volatile( "dsubu $29, $29, 32\n\t" \
4236 "ld $4, 72(%1)\n\t" \
4237 "sd $4, 0($29)\n\t" \
4238 "ld $4, 80(%1)\n\t" \
4239 "sd $4, 8($29)\n\t" \
4240 "ld $4, 88(%1)\n\t" \
4241 "sd $4, 16($29)\n\t" \
4242 "ld $4, 96(%1)\n\t" \
4243 "sd $4, 24($29)\n\t" \
4244 "ld $4, 8(%1)\n\t" \
4245 "ld $5, 16(%1)\n\t" \
4246 "ld $6, 24(%1)\n\t" \
4247 "ld $7, 32(%1)\n\t" \
4248 "ld $8, 40(%1)\n\t" \
4249 "ld $9, 48(%1)\n\t" \
4250 "ld $10, 56(%1)\n\t" \
4251 "ld $11, 64(%1)\n\t" \
4252 "ld $25, 0(%1)\n\t" \
4253 VALGRIND_CALL_NOREDIR_T9 "daddu $29, $29, 32\n\t" \
4256 : "r"( &_argvec[0] ) \
4257 : "memory", __CALLER_SAVED_REGS ); \
4258 lval = (__typeof__( lval ))_res; \
4278 # define VG_USERREQ_TOOL_BASE( a, b ) ( (unsigned int)( ( (a)&0xff ) << 24 | ( (b)&0xff ) << 16 ) )
4279 # define VG_IS_TOOL_USERREQ( a, b, v ) ( VG_USERREQ_TOOL_BASE( a, b ) == ( (v)&0xffff0000 ) )
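Illustration (not from the header): tool-specific headers derive their private request codes from a two-character prefix via VG_USERREQ_TOOL_BASE, and ownership of a code can later be tested with VG_IS_TOOL_USERREQ. A sketch with a made-up 'X','T' tool prefix:

#include "valgrind.h"

/* Hypothetical tool header: carve out a private request range. */
typedef enum {
  VG_USERREQ__XT_DO_SOMETHING = VG_USERREQ_TOOL_BASE( 'X', 'T' ),
  VG_USERREQ__XT_DO_SOMETHING_ELSE
} Vg_XTClientRequest;

/* A request code `req` can then be attributed to this tool with:
     if ( VG_IS_TOOL_USERREQ( 'X', 'T', req ) ) { ... }            */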
4358 # if !defined( __GNUC__ )
4359 # define __extension__
4366 # define RUNNING_ON_VALGRIND \
4367 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 , VG_USERREQ__RUNNING_ON_VALGRIND, 0, 0, 0, 0, 0 )
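Example (illustrative only): RUNNING_ON_VALGRIND evaluates to 0 when the program runs natively, so it can guard Valgrind-only code paths at run time:

#include <stdio.h>
#include "valgrind.h"

int main( void ) {
  if ( RUNNING_ON_VALGRIND )
    printf( "running under Valgrind\n" ); /* request returned non-zero */
  else
    printf( "running natively\n" ); /* request is a no-op, default 0 */
  return 0;
}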
4373 # define VALGRIND_DISCARD_TRANSLATIONS( _qzz_addr, _qzz_len ) \
4374 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__DISCARD_TRANSLATIONS, _qzz_addr, _qzz_len, 0, 0, 0 )
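Example (illustrative, with a made-up JIT helper): code that rewrites an executable buffer at run time should discard any translations Valgrind cached for the old bytes before executing the new ones:

#include <stddef.h>
#include "valgrind.h"

/* Hypothetical hook called after a JIT overwrites `code_len` bytes at
   `code_buf`; without this, Valgrind may keep running translations of
   the previous contents of that range. */
static void jit_code_updated( void* code_buf, size_t code_len ) {
  VALGRIND_DISCARD_TRANSLATIONS( code_buf, code_len );
}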
4381 # if defined( __GNUC__ ) || defined( __INTEL_COMPILER ) && !defined( _MSC_VER )
4384 static int VALGRIND_PRINTF( const char* format, ... ) __attribute__( ( format( __printf__, 1, 2 ), __unused__ ) );
4387 # if defined( _MSC_VER )
4390 VALGRIND_PRINTF( const char* format, ... ) {
4391 # if defined( NVALGRIND )
4394 # if defined( _MSC_VER ) || defined( __MINGW64__ )
4397 unsigned long _qzz_res;
4400 va_start( vargs, format );
4401 # if defined( _MSC_VER ) || defined( __MINGW64__ )
4406 (unsigned long)&vargs, 0, 0, 0 );
4409 return (int)_qzz_res;
4413 # if defined( __GNUC__ ) || defined( __INTEL_COMPILER ) && !defined( _MSC_VER )
4414 static int VALGRIND_PRINTF_BACKTRACE( const char* format, ... )
4415 __attribute__( ( format( __printf__, 1, 2 ), __unused__ ) );
4418 # if defined( _MSC_VER )
4421 VALGRIND_PRINTF_BACKTRACE( const char* format, ... ) {
4422 # if defined( NVALGRIND )
4425 # if defined( _MSC_VER ) || defined( __MINGW64__ )
4428 unsigned long _qzz_res;
4431 va_start( vargs, format );
4432 # if defined( _MSC_VER ) || defined( __MINGW64__ )
4434 (uintptr_t)&vargs, 0, 0, 0 );
4437 (unsigned long)&vargs, 0, 0, 0 );
4440 return (int)_qzz_res;
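Example (illustrative): both printf-style requests write to the Valgrind log and do essentially nothing when the program is not running under Valgrind:

#include "valgrind.h"

static void report_progress( int iteration ) { /* hypothetical helper */
  /* Plain message in the Valgrind output. */
  VALGRIND_PRINTF( "reached iteration %d\n", iteration );
  /* Same, but also prints a stack trace of this call site. */
  VALGRIND_PRINTF_BACKTRACE( "who asked for iteration %d?\n", iteration );
}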
4467 # define VALGRIND_NON_SIMD_CALL0( _qyy_fn ) \
4468 VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 , VG_USERREQ__CLIENT_CALL0, _qyy_fn, 0, 0, 0, 0 )
4470 # define VALGRIND_NON_SIMD_CALL1( _qyy_fn, _qyy_arg1 ) \
4471 VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 , VG_USERREQ__CLIENT_CALL1, _qyy_fn, _qyy_arg1, 0, 0, 0 )
4473 # define VALGRIND_NON_SIMD_CALL2( _qyy_fn, _qyy_arg1, _qyy_arg2 ) \
4474 VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 , VG_USERREQ__CLIENT_CALL2, _qyy_fn, _qyy_arg1, _qyy_arg2, \
4477 # define VALGRIND_NON_SIMD_CALL3( _qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3 ) \
4478 VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 , VG_USERREQ__CLIENT_CALL3, _qyy_fn, _qyy_arg1, _qyy_arg2, \
4484 # define VALGRIND_COUNT_ERRORS \
4485 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 , VG_USERREQ__COUNT_ERRORS, 0, 0, 0, 0, 0 )
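Example (illustrative): VALGRIND_NON_SIMD_CALLn runs a function on the real CPU rather than the simulated one; per the header's documentation Valgrind inserts the current ThreadId as an implicit first argument, so the callee for CALL1 takes two word-sized parameters. VALGRIND_COUNT_ERRORS returns the number of errors reported by the tool so far.

#include <stdio.h>
#include "valgrind.h"

/* Hypothetical callee: the first parameter receives the ThreadId. */
static long double_it( long tid, long x ) {
  (void)tid;
  return 2 * x;
}

static void demo( void ) {
  long     r    = VALGRIND_NON_SIMD_CALL1( double_it, 21 );
  unsigned errs = VALGRIND_COUNT_ERRORS;
  printf( "result=%ld, errors so far=%u\n", r, errs );
}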
4588 # define VALGRIND_MALLOCLIKE_BLOCK( addr, sizeB, rzB, is_zeroed ) \
4589 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MALLOCLIKE_BLOCK, addr, sizeB, rzB, is_zeroed, 0 )
4594 # define VALGRIND_RESIZEINPLACE_BLOCK( addr, oldSizeB, newSizeB, rzB ) \
4595 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__RESIZEINPLACE_BLOCK, addr, oldSizeB, newSizeB, rzB, 0 )
4600 # define VALGRIND_FREELIKE_BLOCK( addr, rzB ) \
4601 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__FREELIKE_BLOCK, addr, rzB, 0, 0, 0 )
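Example (illustrative, names made up): a custom allocator that carves user blocks out of larger regions can keep Memcheck's tracking accurate by describing each block and its red zones:

#include <stdlib.h>
#include "valgrind.h"

#define RZ 16 /* hypothetical red-zone size on each side of a block */

static void* my_alloc( size_t n ) {
  char* raw = malloc( n + 2 * RZ ); /* real backing allocation */
  if ( !raw ) return NULL;
  void* user = raw + RZ;
  /* Describe a "heap block" of n bytes at user, with RZ bytes of
     red zone and contents not zeroed. */
  VALGRIND_MALLOCLIKE_BLOCK( user, n, RZ, 0 );
  return user;
}

static void my_free( void* user ) {
  if ( !user ) return;
  VALGRIND_FREELIKE_BLOCK( user, RZ );
  free( (char*)user - RZ );
}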
4604 # define VALGRIND_CREATE_MEMPOOL( pool, rzB, is_zeroed ) \
4605 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__CREATE_MEMPOOL, pool, rzB, is_zeroed, 0, 0 )
4608 # define VALGRIND_DESTROY_MEMPOOL( pool ) \
4609 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__DESTROY_MEMPOOL, pool, 0, 0, 0, 0 )
4612 # define VALGRIND_MEMPOOL_ALLOC( pool, addr, size ) \
4613 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MEMPOOL_ALLOC, pool, addr, size, 0, 0 )
4616 # define VALGRIND_MEMPOOL_FREE( pool, addr ) \
4617 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MEMPOOL_FREE, pool, addr, 0, 0, 0 )
4620 # define VALGRIND_MEMPOOL_TRIM( pool, addr, size ) \
4621 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MEMPOOL_TRIM, pool, addr, size, 0, 0 )
4624 # define VALGRIND_MOVE_MEMPOOL( poolA, poolB ) \
4625 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MOVE_MEMPOOL, poolA, poolB, 0, 0, 0 )
4628 # define VALGRIND_MEMPOOL_CHANGE( pool, addrA, addrB, size ) \
4629 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MEMPOOL_CHANGE, pool, addrA, addrB, size, 0 )
4632 # define VALGRIND_MEMPOOL_EXISTS( pool ) \
4633 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__MEMPOOL_EXISTS, pool, 0, 0, 0, 0 )
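Example (illustrative): the mempool requests describe a pool anchor and the chunks handed out of it. A minimal bump allocator might use them like this (all names invented; no overflow checks, sketch only):

#include <stdlib.h>
#include "valgrind.h"

typedef struct {
  char*  base;
  size_t used, size;
} Pool;

static Pool* pool_create( size_t size ) {
  Pool* p = malloc( sizeof( Pool ) );
  p->base = malloc( size );
  p->used = 0;
  p->size = size;
  VALGRIND_CREATE_MEMPOOL( p, /*rzB*/ 0, /*is_zeroed*/ 0 );
  return p;
}

static void* pool_alloc( Pool* p, size_t n ) {
  void* chunk = p->base + p->used;
  p->used += n;
  VALGRIND_MEMPOOL_ALLOC( p, chunk, n ); /* chunk becomes a tracked allocation */
  return chunk;
}

static void pool_destroy( Pool* p ) {
  VALGRIND_DESTROY_MEMPOOL( p ); /* retires all remaining chunks */
  free( p->base );
  free( p );
}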
4636 # define VALGRIND_STACK_REGISTER( start, end ) \
4637 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__STACK_REGISTER, start, end, 0, 0, 0 )
4641 # define VALGRIND_STACK_DEREGISTER( id ) \
4642 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__STACK_DEREGISTER, id, 0, 0, 0, 0 )
4645 # define VALGRIND_STACK_CHANGE( id, start, end ) \
4646 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__STACK_CHANGE, id, start, end, 0, 0 )
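Example (illustrative): programs that switch to manually allocated stacks (fibers, coroutines, sigaltstack) can register them so Valgrind can distinguish stack switches from wild writes:

#include <stdlib.h>
#include "valgrind.h"

#define FIBER_STACK_SIZE ( 64 * 1024 ) /* hypothetical size */

static unsigned fiber_stack_id;
static char*    fiber_stack;

static void fiber_stack_create( void ) {
  fiber_stack = malloc( FIBER_STACK_SIZE );
  /* start and end bracket the allocated range; the returned id is
     needed later for deregistration. */
  fiber_stack_id = VALGRIND_STACK_REGISTER( fiber_stack, fiber_stack + FIBER_STACK_SIZE );
}

static void fiber_stack_destroy( void ) {
  VALGRIND_STACK_DEREGISTER( fiber_stack_id );
  free( fiber_stack );
}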
4649 # define VALGRIND_LOAD_PDB_DEBUGINFO( fd, ptr, total_size, delta ) \
4650 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__LOAD_PDB_DEBUGINFO, fd, ptr, total_size, delta, 0 )
4656 # define VALGRIND_MAP_IP_TO_SRCLOC( addr, buf64 ) \
4657 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__MAP_IP_TO_SRCLOC, addr, buf64, 0, 0, 0 )
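Example (illustrative): per the header's description, VALGRIND_MAP_IP_TO_SRCLOC fills a caller-supplied 64-byte buffer with a textual source location for a code address, setting the first byte to 0 when nothing is known. GCC's __builtin_return_address is used here only to obtain an interesting address:

#include <stdio.h>
#include "valgrind.h"

static void who_called_me( void ) { /* hypothetical helper */
  char buf64[64]; /* must be at least 64 bytes */
  buf64[0] = '\0'; /* stays empty if not running under Valgrind */
  (void)VALGRIND_MAP_IP_TO_SRCLOC( __builtin_return_address( 0 ), buf64 );
  if ( buf64[0] != '\0' ) printf( "called from %s\n", buf64 );
}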
4667 # define VALGRIND_DISABLE_ERROR_REPORTING \
4668 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__CHANGE_ERR_DISABLEMENT, 1, 0, 0, 0, 0 )
4672 # define VALGRIND_ENABLE_ERROR_REPORTING \
4673 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__CHANGE_ERR_DISABLEMENT, -1, 0, 0, 0, 0 )
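Example (illustrative): error reporting can be switched off around code that is known to trigger benign reports. The two requests adjust a per-thread disablement counter by +1/-1, so they nest and every DISABLE needs a matching ENABLE:

#include "valgrind.h"

extern void noisy_third_party_routine( void ); /* hypothetical */

static void call_quietly( void ) {
  VALGRIND_DISABLE_ERROR_REPORTING;
  noisy_third_party_routine();
  VALGRIND_ENABLE_ERROR_REPORTING;
}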
4680 # define VALGRIND_MONITOR_COMMAND( command ) \
4681 VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__GDB_MONITOR_COMMAND, command, 0, 0, 0, 0 )
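Example (illustrative): the client can drive the embedded gdbserver monitor directly; "v.info n_errs_found" is assumed here to be one of the general monitor commands. Output goes to the Valgrind log, or to GDB/vgdb if a connection is open.

#include <stdio.h>
#include "valgrind.h"

static void dump_error_count( void ) { /* hypothetical helper */
  /* The request yields a non-zero value if the command was not recognised. */
  unsigned unknown = VALGRIND_MONITOR_COMMAND( "v.info n_errs_found" );
  if ( unknown ) printf( "monitor command not recognised\n" );
}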
4683 # undef PLAT_x86_darwin
4684 # undef PLAT_amd64_darwin
4685 # undef PLAT_x86_win32
4686 # undef PLAT_amd64_win64
4687 # undef PLAT_x86_linux
4688 # undef PLAT_amd64_linux
4689 # undef PLAT_ppc32_linux
4690 # undef PLAT_ppc64_linux
4691 # undef PLAT_arm_linux
4692 # undef PLAT_s390x_linux
4693 # undef PLAT_mips32_linux
4694 # undef PLAT_mips64_linux