89 #define __VALGRIND_MAJOR__ 3
90 #define __VALGRIND_MINOR__ 8
108 #undef PLAT_x86_darwin
109 #undef PLAT_amd64_darwin
110 #undef PLAT_x86_win32
111 #undef PLAT_amd64_win64
112 #undef PLAT_x86_linux
113 #undef PLAT_amd64_linux
114 #undef PLAT_ppc32_linux
115 #undef PLAT_ppc64_linux
116 #undef PLAT_arm_linux
117 #undef PLAT_s390x_linux
118 #undef PLAT_mips32_linux
119 #undef PLAT_mips64_linux
121 #if defined( __APPLE__ ) && defined( __i386__ )
122 # define PLAT_x86_darwin 1
123 #elif defined( __APPLE__ ) && defined( __x86_64__ )
124 # define PLAT_amd64_darwin 1
125 #elif defined( __MINGW32__ ) || defined( __CYGWIN32__ ) || ( defined( _WIN32 ) && defined( _M_IX86 ) )
126 # define PLAT_x86_win32 1
127 #elif defined( __MINGW64__ ) || ( defined( _WIN64 ) && defined( _M_X64 ) )
128 # define PLAT_amd64_win64 1
129 #elif defined( __linux__ ) && defined( __i386__ )
130 # define PLAT_x86_linux 1
131 #elif defined( __linux__ ) && defined( __x86_64__ )
132 # define PLAT_amd64_linux 1
133 #elif defined( __linux__ ) && defined( __powerpc__ ) && !defined( __powerpc64__ )
134 # define PLAT_ppc32_linux 1
135 #elif defined( __linux__ ) && defined( __powerpc__ ) && defined( __powerpc64__ )
136 # define PLAT_ppc64_linux 1
137 #elif defined( __linux__ ) && defined( __arm__ )
138 # define PLAT_arm_linux 1
139 #elif defined( __linux__ ) && defined( __s390__ ) && defined( __s390x__ )
140 # define PLAT_s390x_linux 1
141 #elif defined( __linux__ ) && defined( __mips__ )
142 # if ( __mips == 64 )
143 # define PLAT_mips64_linux 1
145 # define PLAT_mips32_linux 1
150 # if !defined( NVALGRIND )
173 #define VALGRIND_DO_CLIENT_REQUEST( _zzq_rlval, _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, \
174 _zzq_arg4, _zzq_arg5 ) \
176 ( _zzq_rlval ) = VALGRIND_DO_CLIENT_REQUEST_EXPR( ( _zzq_default ), ( _zzq_request ), ( _zzq_arg1 ), \
177 ( _zzq_arg2 ), ( _zzq_arg3 ), ( _zzq_arg4 ), ( _zzq_arg5 ) ); \
180 #define VALGRIND_DO_CLIENT_REQUEST_STMT( _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5 ) \
182 (void)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, ( _zzq_request ), ( _zzq_arg1 ), ( _zzq_arg2 ), ( _zzq_arg3 ), \
183 ( _zzq_arg4 ), ( _zzq_arg5 ) ); \
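/* Usage sketch (not part of valgrind.h itself): the ..._EXPR form evaluates to
 * _zzq_default when the program is not running under Valgrind, so requests are
 * safe to issue unconditionally.  For instance, the RUNNING_ON_VALGRIND query
 * defined later in the full header is built directly on this macro. */
#if 0 /* illustration only; requires the complete valgrind.h, not just this excerpt */
#include <valgrind/valgrind.h>
#include <stdio.h>

int main( void ) {
    /* 0 when run natively, non-zero (the number of nested Valgrinds) otherwise. */
    if ( RUNNING_ON_VALGRIND )
        fprintf( stderr, "running under Valgrind\n" );
    return 0;
}
#endif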
186 #if defined( NVALGRIND )
191 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
234 # if defined( PLAT_x86_linux ) || defined( PLAT_x86_darwin ) || ( defined( PLAT_x86_win32 ) && defined( __GNUC__ ) )
240 # define __SPECIAL_INSTRUCTION_PREAMBLE \
241 "roll $3, %%edi ; roll $13, %%edi\n\t" \
242 "roll $29, %%edi ; roll $19, %%edi\n\t"
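/* Note: this preamble is a no-op on the real CPU -- the four rotate amounts sum
 * to 64, i.e. 0 mod 32, so %edi is left unchanged -- but Valgrind's JIT
 * recognises the sequence as the marker for a client request.  The other
 * architectures below use the same trick with different rotate/shift amounts. */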
244 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
247 volatile unsigned int _zzq_args[6]; \
248 volatile unsigned int _zzq_result; \
249 _zzq_args[0] = (unsigned int)( _zzq_request ); \
250 _zzq_args[1] = (unsigned int)( _zzq_arg1 ); \
251 _zzq_args[2] = (unsigned int)( _zzq_arg2 ); \
252 _zzq_args[3] = (unsigned int)( _zzq_arg3 ); \
253 _zzq_args[4] = (unsigned int)( _zzq_arg4 ); \
254 _zzq_args[5] = (unsigned int)( _zzq_arg5 ); \
255 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
256 "xchgl %%ebx,%%ebx" \
257 : "=d"( _zzq_result ) \
258 : "a"( &_zzq_args[0] ), "0"( _zzq_default ) \
259 : "cc", "memory" ); \
263 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
265 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
266 volatile unsigned int __addr; \
267 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
268 "xchgl %%ecx,%%ecx" \
271 : "cc", "memory" ); \
272 _zzq_orig->nraddr = __addr; \
275 # define VALGRIND_CALL_NOREDIR_EAX \
276 __SPECIAL_INSTRUCTION_PREAMBLE \
278 "xchgl %%edx,%%edx\n\t"
280 # define VALGRIND_VEX_INJECT_IR() \
282 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "xchgl %%edi,%%edi\n\t" : : : "cc", "memory" ); \
289 # if defined( PLAT_x86_win32 ) && !defined( __GNUC__ )
295 # if defined( _MSC_VER )
297 # define __SPECIAL_INSTRUCTION_PREAMBLE __asm rol edi, 3 __asm rol edi, 13 __asm rol edi, 29 __asm rol edi, 19
299 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
301 valgrind_do_client_request_expr( (uintptr_t)( _zzq_default ), (uintptr_t)( _zzq_request ), \
302 (uintptr_t)( _zzq_arg1 ), (uintptr_t)( _zzq_arg2 ), (uintptr_t)( _zzq_arg3 ), \
303 (uintptr_t)( _zzq_arg4 ), (uintptr_t)( _zzq_arg5 ) )
305 static __inline uintptr_t valgrind_do_client_request_expr( uintptr_t _zzq_default, uintptr_t _zzq_request,
306 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
307 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
308 uintptr_t _zzq_arg5 ) {
309 volatile uintptr_t _zzq_args[6];
310 volatile unsigned int _zzq_result;
311 _zzq_args[0] = (uintptr_t)( _zzq_request );
312 _zzq_args[1] = (uintptr_t)( _zzq_arg1 );
313 _zzq_args[2] = (uintptr_t)( _zzq_arg2 );
314 _zzq_args[3] = (uintptr_t)( _zzq_arg3 );
315 _zzq_args[4] = (uintptr_t)( _zzq_arg4 );
316 _zzq_args[5] = (uintptr_t)( _zzq_arg5 );
317 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
318 __SPECIAL_INSTRUCTION_PREAMBLE
321 __asm mov _zzq_result, edx
326 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
328 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
329 volatile unsigned int __addr; \
330 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
332 __asm mov __addr, eax } \
333 _zzq_orig->nraddr = __addr; \
336 # define VALGRIND_CALL_NOREDIR_EAX ERROR
338 # define VALGRIND_VEX_INJECT_IR() \
340 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
341 __asm xchg edi,edi } \
345 # error Unsupported compiler.
352 # if defined( PLAT_amd64_linux ) || defined( PLAT_amd64_darwin )
355 unsigned long long int nraddr;
358 # define __SPECIAL_INSTRUCTION_PREAMBLE \
359 "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
360 "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
362 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
365 volatile unsigned long long int _zzq_args[6]; \
366 volatile unsigned long long int _zzq_result; \
367 _zzq_args[0] = (unsigned long long int)( _zzq_request ); \
368 _zzq_args[1] = (unsigned long long int)( _zzq_arg1 ); \
369 _zzq_args[2] = (unsigned long long int)( _zzq_arg2 ); \
370 _zzq_args[3] = (unsigned long long int)( _zzq_arg3 ); \
371 _zzq_args[4] = (unsigned long long int)( _zzq_arg4 ); \
372 _zzq_args[5] = (unsigned long long int)( _zzq_arg5 ); \
373 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
374 "xchgq %%rbx,%%rbx" \
375 : "=d"( _zzq_result ) \
376 : "a"( &_zzq_args[0] ), "0"( _zzq_default ) \
377 : "cc", "memory" ); \
381 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
383 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
384 volatile unsigned long long int __addr; \
385 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
386 "xchgq %%rcx,%%rcx" \
389 : "cc", "memory" ); \
390 _zzq_orig->nraddr = __addr; \
393 # define VALGRIND_CALL_NOREDIR_RAX \
394 __SPECIAL_INSTRUCTION_PREAMBLE \
396 "xchgq %%rdx,%%rdx\n\t"
398 # define VALGRIND_VEX_INJECT_IR() \
400 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "xchgq %%rdi,%%rdi\n\t" : : : "cc", "memory" ); \
407 # if defined( PLAT_ppc32_linux )
413 # define __SPECIAL_INSTRUCTION_PREAMBLE \
414 "rlwinm 0,0,3,0,0 ; rlwinm 0,0,13,0,0\n\t" \
415 "rlwinm 0,0,29,0,0 ; rlwinm 0,0,19,0,0\n\t"
417 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
421 unsigned int _zzq_args[6]; \
422 unsigned int _zzq_result; \
423 unsigned int* _zzq_ptr; \
424 _zzq_args[0] = (unsigned int)( _zzq_request ); \
425 _zzq_args[1] = (unsigned int)( _zzq_arg1 ); \
426 _zzq_args[2] = (unsigned int)( _zzq_arg2 ); \
427 _zzq_args[3] = (unsigned int)( _zzq_arg3 ); \
428 _zzq_args[4] = (unsigned int)( _zzq_arg4 ); \
429 _zzq_args[5] = (unsigned int)( _zzq_arg5 ); \
430 _zzq_ptr = _zzq_args; \
431 __asm__ volatile( "mr 3,%1\n\t" \
433 __SPECIAL_INSTRUCTION_PREAMBLE \
436 : "=b"( _zzq_result ) \
437 : "b"( _zzq_default ), "b"( _zzq_ptr ) \
438 : "cc", "memory", "r3", "r4" ); \
442 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
444 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
445 unsigned int __addr; \
446 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
451 : "cc", "memory", "r3" ); \
452 _zzq_orig->nraddr = __addr; \
455 # define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
456 __SPECIAL_INSTRUCTION_PREAMBLE \
460 # define VALGRIND_VEX_INJECT_IR() \
461 do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "or 5,5,5\n\t" ); } while ( 0 )
467 # if defined( PLAT_ppc64_linux )
470 unsigned long long int nraddr;
471 unsigned long long int r2;
474 # define __SPECIAL_INSTRUCTION_PREAMBLE \
475 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
476 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
478 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
482 unsigned long long int _zzq_args[6]; \
483 unsigned long long int _zzq_result; \
484 unsigned long long int* _zzq_ptr; \
485 _zzq_args[0] = (unsigned long long int)( _zzq_request ); \
486 _zzq_args[1] = (unsigned long long int)( _zzq_arg1 ); \
487 _zzq_args[2] = (unsigned long long int)( _zzq_arg2 ); \
488 _zzq_args[3] = (unsigned long long int)( _zzq_arg3 ); \
489 _zzq_args[4] = (unsigned long long int)( _zzq_arg4 ); \
490 _zzq_args[5] = (unsigned long long int)( _zzq_arg5 ); \
491 _zzq_ptr = _zzq_args; \
492 __asm__ volatile( "mr 3,%1\n\t" \
494 __SPECIAL_INSTRUCTION_PREAMBLE \
497 : "=b"( _zzq_result ) \
498 : "b"( _zzq_default ), "b"( _zzq_ptr ) \
499 : "cc", "memory", "r3", "r4" ); \
503 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
505 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
506 unsigned long long int __addr; \
507 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
512 : "cc", "memory", "r3" ); \
513 _zzq_orig->nraddr = __addr; \
514 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
519 : "cc", "memory", "r3" ); \
520 _zzq_orig->r2 = __addr; \
523 # define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
524 __SPECIAL_INSTRUCTION_PREAMBLE \
528 # define VALGRIND_VEX_INJECT_IR() \
529 do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "or 5,5,5\n\t" ); } while ( 0 )
535 # if defined( PLAT_arm_linux )
541 # define __SPECIAL_INSTRUCTION_PREAMBLE \
542 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
543 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
545 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
549 volatile unsigned int _zzq_args[6]; \
550 volatile unsigned int _zzq_result; \
551 _zzq_args[0] = (unsigned int)( _zzq_request ); \
552 _zzq_args[1] = (unsigned int)( _zzq_arg1 ); \
553 _zzq_args[2] = (unsigned int)( _zzq_arg2 ); \
554 _zzq_args[3] = (unsigned int)( _zzq_arg3 ); \
555 _zzq_args[4] = (unsigned int)( _zzq_arg4 ); \
556 _zzq_args[5] = (unsigned int)( _zzq_arg5 ); \
557 __asm__ volatile( "mov r3, %1\n\t" \
559 __SPECIAL_INSTRUCTION_PREAMBLE \
560 "orr r10, r10, r10\n\t" \
562 : "=r"( _zzq_result ) \
563 : "r"( _zzq_default ), "r"( &_zzq_args[0] ) \
564 : "cc", "memory", "r3", "r4" ); \
568 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
570 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
571 unsigned int __addr; \
572 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
573 "orr r11, r11, r11\n\t" \
577 : "cc", "memory", "r3" ); \
578 _zzq_orig->nraddr = __addr; \
581 # define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
582 __SPECIAL_INSTRUCTION_PREAMBLE \
584 "orr r12, r12, r12\n\t"
586 # define VALGRIND_VEX_INJECT_IR() \
587 do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "orr r9, r9, r9\n\t" : : : "cc", "memory" ); } while ( 0 )
593 # if defined( PLAT_s390x_linux )
596 unsigned long long int nraddr;
603 # define __SPECIAL_INSTRUCTION_PREAMBLE \
609 # define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
610 # define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
611 # define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
612 # define __VEX_INJECT_IR_CODE "lr 5,5\n\t"
614 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
617 volatile unsigned long long int _zzq_args[6]; \
618 volatile unsigned long long int _zzq_result; \
619 _zzq_args[0] = (unsigned long long int)( _zzq_request ); \
620 _zzq_args[1] = (unsigned long long int)( _zzq_arg1 ); \
621 _zzq_args[2] = (unsigned long long int)( _zzq_arg2 ); \
622 _zzq_args[3] = (unsigned long long int)( _zzq_arg3 ); \
623 _zzq_args[4] = (unsigned long long int)( _zzq_arg4 ); \
624 _zzq_args[5] = (unsigned long long int)( _zzq_arg5 ); \
627 "lgr 3,%2\n\t" __SPECIAL_INSTRUCTION_PREAMBLE __CLIENT_REQUEST_CODE \
629 : "=d"( _zzq_result ) \
630 : "a"( &_zzq_args[0] ), "0"( _zzq_default ) \
631 : "cc", "2", "3", "memory" ); \
635 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
637 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
638 volatile unsigned long long int __addr; \
639 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE __GET_NR_CONTEXT_CODE "lgr %0, 3\n\t" \
642 : "cc", "3", "memory" ); \
643 _zzq_orig->nraddr = __addr; \
646 # define VALGRIND_CALL_NOREDIR_R1 \
647 __SPECIAL_INSTRUCTION_PREAMBLE \
650 # define VALGRIND_VEX_INJECT_IR() \
651 do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE __VEX_INJECT_IR_CODE ); } while ( 0 )
657 # if defined( PLAT_mips32_linux )
667 # define __SPECIAL_INSTRUCTION_PREAMBLE \
668 "srl $0, $0, 13\n\t" \
669 "srl $0, $0, 29\n\t" \
670 "srl $0, $0, 3\n\t" \
673 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
676 volatile unsigned int _zzq_args[6]; \
677 volatile unsigned int _zzq_result; \
678 _zzq_args[0] = (unsigned int)( _zzq_request ); \
679 _zzq_args[1] = (unsigned int)( _zzq_arg1 ); \
680 _zzq_args[2] = (unsigned int)( _zzq_arg2 ); \
681 _zzq_args[3] = (unsigned int)( _zzq_arg3 ); \
682 _zzq_args[4] = (unsigned int)( _zzq_arg4 ); \
683 _zzq_args[5] = (unsigned int)( _zzq_arg5 ); \
684 __asm__ volatile( "move $11, %1\n\t" \
686 __SPECIAL_INSTRUCTION_PREAMBLE \
687 "or $13, $13, $13\n\t" \
689 : "=r"( _zzq_result ) \
690 : "r"( _zzq_default ), "r"( &_zzq_args[0] ) \
695 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
697 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
698 volatile unsigned int __addr; \
699 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
700 "or $14, $14, $14\n\t" \
705 _zzq_orig->nraddr = __addr; \
708 # define VALGRIND_CALL_NOREDIR_T9 \
709 __SPECIAL_INSTRUCTION_PREAMBLE \
711 "or $15, $15, $15\n\t"
713 # define VALGRIND_VEX_INJECT_IR() \
714 do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "or $11, $11, $11\n\t" ); } while ( 0 )
720 # if defined( PLAT_mips64_linux )
723 unsigned long long nraddr;
730 # define __SPECIAL_INSTRUCTION_PREAMBLE \
731 "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
732 "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
734 # define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
737 volatile unsigned long long int _zzq_args[6]; \
738 volatile unsigned long long int _zzq_result; \
739 _zzq_args[0] = (unsigned long long int)( _zzq_request ); \
740 _zzq_args[1] = (unsigned long long int)( _zzq_arg1 ); \
741 _zzq_args[2] = (unsigned long long int)( _zzq_arg2 ); \
742 _zzq_args[3] = (unsigned long long int)( _zzq_arg3 ); \
743 _zzq_args[4] = (unsigned long long int)( _zzq_arg4 ); \
744 _zzq_args[5] = (unsigned long long int)( _zzq_arg5 ); \
745 __asm__ volatile( "move $11, %1\n\t" \
747 __SPECIAL_INSTRUCTION_PREAMBLE \
748 "or $13, $13, $13\n\t" \
750 : "=r"( _zzq_result ) \
751 : "r"( _zzq_default ), "r"( &_zzq_args[0] ) \
756 # define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
758 volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
759 volatile unsigned long long int __addr; \
760 __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE \
761 "or $14, $14, $14\n\t" \
766 _zzq_orig->nraddr = __addr; \
769 # define VALGRIND_CALL_NOREDIR_T9 \
770 __SPECIAL_INSTRUCTION_PREAMBLE \
772 "or $15, $15, $15\n\t"
774 # define VALGRIND_VEX_INJECT_IR() \
775 do { __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "or $11, $11, $11\n\t" ); } while ( 0 )
814 #define VG_CONCAT4( _aa, _bb, _cc, _dd ) _aa##_bb##_cc##_dd
816 #define I_WRAP_SONAME_FNNAME_ZU( soname, fnname ) VG_CONCAT4( _vgw00000ZU_, soname, _, fnname )
818 #define I_WRAP_SONAME_FNNAME_ZZ( soname, fnname ) VG_CONCAT4( _vgw00000ZZ_, soname, _, fnname )
824 #define VALGRIND_GET_ORIG_FN( _lval ) VALGRIND_GET_NR_CONTEXT( _lval )
832 #define I_REPLACE_SONAME_FNNAME_ZU( soname, fnname ) VG_CONCAT4( _vgr00000ZU_, soname, _, fnname )
834 #define I_REPLACE_SONAME_FNNAME_ZZ( soname, fnname ) VG_CONCAT4( _vgr00000ZZ_, soname, _, fnname )
839 #define CALL_FN_v_v( fnptr ) \
841 volatile unsigned long _junk; \
842 CALL_FN_W_v( _junk, fnptr ); \
845 #define CALL_FN_v_W( fnptr, arg1 ) \
847 volatile unsigned long _junk; \
848 CALL_FN_W_W( _junk, fnptr, arg1 ); \
851 #define CALL_FN_v_WW( fnptr, arg1, arg2 ) \
853 volatile unsigned long _junk; \
854 CALL_FN_W_WW( _junk, fnptr, arg1, arg2 ); \
857 #define CALL_FN_v_WWW( fnptr, arg1, arg2, arg3 ) \
859 volatile unsigned long _junk; \
860 CALL_FN_W_WWW( _junk, fnptr, arg1, arg2, arg3 ); \
863 #define CALL_FN_v_WWWW( fnptr, arg1, arg2, arg3, arg4 ) \
865 volatile unsigned long _junk; \
866 CALL_FN_W_WWWW( _junk, fnptr, arg1, arg2, arg3, arg4 ); \
869 #define CALL_FN_v_5W( fnptr, arg1, arg2, arg3, arg4, arg5 ) \
871 volatile unsigned long _junk; \
872 CALL_FN_W_5W( _junk, fnptr, arg1, arg2, arg3, arg4, arg5 ); \
875 #define CALL_FN_v_6W( fnptr, arg1, arg2, arg3, arg4, arg5, arg6 ) \
877 volatile unsigned long _junk; \
878 CALL_FN_W_6W( _junk, fnptr, arg1, arg2, arg3, arg4, arg5, arg6 ); \
881 #define CALL_FN_v_7W( fnptr, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
883 volatile unsigned long _junk; \
884 CALL_FN_W_7W( _junk, fnptr, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ); \
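/* Usage sketch (not part of valgrind.h itself): the function-wrapping pattern the
 * macros above support, following the scheme in the Valgrind manual.  Here "NONE"
 * is the Z-encoded soname of an object with no soname (e.g. the main executable),
 * and foo is a hypothetical function of two word-sized arguments; building this
 * requires the complete valgrind.h, not just this excerpt. */
#if 0
#include <valgrind/valgrind.h>

int I_WRAP_SONAME_FNNAME_ZU( NONE, foo )( int x, int y ) {
    OrigFn fn;
    int    result;
    VALGRIND_GET_ORIG_FN( fn );        /* capture the address of the real foo        */
    CALL_FN_W_WW( result, fn, x, y );  /* call the real foo without being redirected */
    return result;
}
#endif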
889 #if defined( PLAT_x86_linux ) || defined( PLAT_x86_darwin )
893 # define __CALLER_SAVED_REGS "ecx", "edx"
900 # define VALGRIND_ALIGN_STACK \
901 "movl %%esp,%%edi\n\t" \
902 "andl $0xfffffff0,%%esp\n\t"
903 # define VALGRIND_RESTORE_STACK "movl %%edi,%%esp\n\t"
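/* These two macros bracket every CALL_FN_* below: the incoming stack pointer is
 * saved in %edi (declared as clobbered), %esp is rounded down to a 16-byte
 * boundary before the arguments are pushed, and the saved value is restored
 * after the call.  Each architecture section below repeats the same pattern
 * with its own scratch register. */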
908 # define CALL_FN_W_v( lval, orig ) \
910 volatile OrigFn _orig = ( orig ); \
911 volatile unsigned long _argvec[1]; \
912 volatile unsigned long _res; \
913 _argvec[0] = (unsigned long)_orig.nraddr; \
914 __asm__ volatile( VALGRIND_ALIGN_STACK "movl (%%eax), %%eax\n\t" \
915 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
917 : "a"( &_argvec[0] ) \
918 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
919 lval = (__typeof__( lval ))_res; \
922 # define CALL_FN_W_W( lval, orig, arg1 ) \
924 volatile OrigFn _orig = ( orig ); \
925 volatile unsigned long _argvec[2]; \
926 volatile unsigned long _res; \
927 _argvec[0] = (unsigned long)_orig.nraddr; \
928 _argvec[1] = (unsigned long)( arg1 ); \
929 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $12, %%esp\n\t" \
930 "pushl 4(%%eax)\n\t" \
931 "movl (%%eax), %%eax\n\t" \
932 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
934 : "a"( &_argvec[0] ) \
935 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
936 lval = (__typeof__( lval ))_res; \
939 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
941 volatile OrigFn _orig = ( orig ); \
942 volatile unsigned long _argvec[3]; \
943 volatile unsigned long _res; \
944 _argvec[0] = (unsigned long)_orig.nraddr; \
945 _argvec[1] = (unsigned long)( arg1 ); \
946 _argvec[2] = (unsigned long)( arg2 ); \
947 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $8, %%esp\n\t" \
948 "pushl 8(%%eax)\n\t" \
949 "pushl 4(%%eax)\n\t" \
950 "movl (%%eax), %%eax\n\t" \
951 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
953 : "a"( &_argvec[0] ) \
954 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
955 lval = (__typeof__( lval ))_res; \
958 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
960 volatile OrigFn _orig = ( orig ); \
961 volatile unsigned long _argvec[4]; \
962 volatile unsigned long _res; \
963 _argvec[0] = (unsigned long)_orig.nraddr; \
964 _argvec[1] = (unsigned long)( arg1 ); \
965 _argvec[2] = (unsigned long)( arg2 ); \
966 _argvec[3] = (unsigned long)( arg3 ); \
967 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $4, %%esp\n\t" \
968 "pushl 12(%%eax)\n\t" \
969 "pushl 8(%%eax)\n\t" \
970 "pushl 4(%%eax)\n\t" \
971 "movl (%%eax), %%eax\n\t" \
972 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
974 : "a"( &_argvec[0] ) \
975 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
976 lval = (__typeof__( lval ))_res; \
979 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
981 volatile OrigFn _orig = ( orig ); \
982 volatile unsigned long _argvec[5]; \
983 volatile unsigned long _res; \
984 _argvec[0] = (unsigned long)_orig.nraddr; \
985 _argvec[1] = (unsigned long)( arg1 ); \
986 _argvec[2] = (unsigned long)( arg2 ); \
987 _argvec[3] = (unsigned long)( arg3 ); \
988 _argvec[4] = (unsigned long)( arg4 ); \
989 __asm__ volatile( VALGRIND_ALIGN_STACK "pushl 16(%%eax)\n\t" \
990 "pushl 12(%%eax)\n\t" \
991 "pushl 8(%%eax)\n\t" \
992 "pushl 4(%%eax)\n\t" \
993 "movl (%%eax), %%eax\n\t" \
994 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
996 : "a"( &_argvec[0] ) \
997 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
998 lval = (__typeof__( lval ))_res; \
1001 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
1003 volatile OrigFn _orig = ( orig ); \
1004 volatile unsigned long _argvec[6]; \
1005 volatile unsigned long _res; \
1006 _argvec[0] = (unsigned long)_orig.nraddr; \
1007 _argvec[1] = (unsigned long)( arg1 ); \
1008 _argvec[2] = (unsigned long)( arg2 ); \
1009 _argvec[3] = (unsigned long)( arg3 ); \
1010 _argvec[4] = (unsigned long)( arg4 ); \
1011 _argvec[5] = (unsigned long)( arg5 ); \
1012 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $12, %%esp\n\t" \
1013 "pushl 20(%%eax)\n\t" \
1014 "pushl 16(%%eax)\n\t" \
1015 "pushl 12(%%eax)\n\t" \
1016 "pushl 8(%%eax)\n\t" \
1017 "pushl 4(%%eax)\n\t" \
1018 "movl (%%eax), %%eax\n\t" \
1019 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1021 : "a"( &_argvec[0] ) \
1022 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
1023 lval = (__typeof__( lval ))_res; \
1026 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
1028 volatile OrigFn _orig = ( orig ); \
1029 volatile unsigned long _argvec[7]; \
1030 volatile unsigned long _res; \
1031 _argvec[0] = (unsigned long)_orig.nraddr; \
1032 _argvec[1] = (unsigned long)( arg1 ); \
1033 _argvec[2] = (unsigned long)( arg2 ); \
1034 _argvec[3] = (unsigned long)( arg3 ); \
1035 _argvec[4] = (unsigned long)( arg4 ); \
1036 _argvec[5] = (unsigned long)( arg5 ); \
1037 _argvec[6] = (unsigned long)( arg6 ); \
1038 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $8, %%esp\n\t" \
1039 "pushl 24(%%eax)\n\t" \
1040 "pushl 20(%%eax)\n\t" \
1041 "pushl 16(%%eax)\n\t" \
1042 "pushl 12(%%eax)\n\t" \
1043 "pushl 8(%%eax)\n\t" \
1044 "pushl 4(%%eax)\n\t" \
1045 "movl (%%eax), %%eax\n\t" \
1046 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1048 : "a"( &_argvec[0] ) \
1049 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
1050 lval = (__typeof__( lval ))_res; \
1053 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
1055 volatile OrigFn _orig = ( orig ); \
1056 volatile unsigned long _argvec[8]; \
1057 volatile unsigned long _res; \
1058 _argvec[0] = (unsigned long)_orig.nraddr; \
1059 _argvec[1] = (unsigned long)( arg1 ); \
1060 _argvec[2] = (unsigned long)( arg2 ); \
1061 _argvec[3] = (unsigned long)( arg3 ); \
1062 _argvec[4] = (unsigned long)( arg4 ); \
1063 _argvec[5] = (unsigned long)( arg5 ); \
1064 _argvec[6] = (unsigned long)( arg6 ); \
1065 _argvec[7] = (unsigned long)( arg7 ); \
1066 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $4, %%esp\n\t" \
1067 "pushl 28(%%eax)\n\t" \
1068 "pushl 24(%%eax)\n\t" \
1069 "pushl 20(%%eax)\n\t" \
1070 "pushl 16(%%eax)\n\t" \
1071 "pushl 12(%%eax)\n\t" \
1072 "pushl 8(%%eax)\n\t" \
1073 "pushl 4(%%eax)\n\t" \
1074 "movl (%%eax), %%eax\n\t" \
1075 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1077 : "a"( &_argvec[0] ) \
1078 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
1079 lval = (__typeof__( lval ))_res; \
1082 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
1084 volatile OrigFn _orig = ( orig ); \
1085 volatile unsigned long _argvec[9]; \
1086 volatile unsigned long _res; \
1087 _argvec[0] = (unsigned long)_orig.nraddr; \
1088 _argvec[1] = (unsigned long)( arg1 ); \
1089 _argvec[2] = (unsigned long)( arg2 ); \
1090 _argvec[3] = (unsigned long)( arg3 ); \
1091 _argvec[4] = (unsigned long)( arg4 ); \
1092 _argvec[5] = (unsigned long)( arg5 ); \
1093 _argvec[6] = (unsigned long)( arg6 ); \
1094 _argvec[7] = (unsigned long)( arg7 ); \
1095 _argvec[8] = (unsigned long)( arg8 ); \
1096 __asm__ volatile( VALGRIND_ALIGN_STACK "pushl 32(%%eax)\n\t" \
1097 "pushl 28(%%eax)\n\t" \
1098 "pushl 24(%%eax)\n\t" \
1099 "pushl 20(%%eax)\n\t" \
1100 "pushl 16(%%eax)\n\t" \
1101 "pushl 12(%%eax)\n\t" \
1102 "pushl 8(%%eax)\n\t" \
1103 "pushl 4(%%eax)\n\t" \
1104 "movl (%%eax), %%eax\n\t" \
1105 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1107 : "a"( &_argvec[0] ) \
1108 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
1109 lval = (__typeof__( lval ))_res; \
1112 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
1114 volatile OrigFn _orig = ( orig ); \
1115 volatile unsigned long _argvec[10]; \
1116 volatile unsigned long _res; \
1117 _argvec[0] = (unsigned long)_orig.nraddr; \
1118 _argvec[1] = (unsigned long)( arg1 ); \
1119 _argvec[2] = (unsigned long)( arg2 ); \
1120 _argvec[3] = (unsigned long)( arg3 ); \
1121 _argvec[4] = (unsigned long)( arg4 ); \
1122 _argvec[5] = (unsigned long)( arg5 ); \
1123 _argvec[6] = (unsigned long)( arg6 ); \
1124 _argvec[7] = (unsigned long)( arg7 ); \
1125 _argvec[8] = (unsigned long)( arg8 ); \
1126 _argvec[9] = (unsigned long)( arg9 ); \
1127 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $12, %%esp\n\t" \
1128 "pushl 36(%%eax)\n\t" \
1129 "pushl 32(%%eax)\n\t" \
1130 "pushl 28(%%eax)\n\t" \
1131 "pushl 24(%%eax)\n\t" \
1132 "pushl 20(%%eax)\n\t" \
1133 "pushl 16(%%eax)\n\t" \
1134 "pushl 12(%%eax)\n\t" \
1135 "pushl 8(%%eax)\n\t" \
1136 "pushl 4(%%eax)\n\t" \
1137 "movl (%%eax), %%eax\n\t" \
1138 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1140 : "a"( &_argvec[0] ) \
1141 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
1142 lval = (__typeof__( lval ))_res; \
1145 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
1147 volatile OrigFn _orig = ( orig ); \
1148 volatile unsigned long _argvec[11]; \
1149 volatile unsigned long _res; \
1150 _argvec[0] = (unsigned long)_orig.nraddr; \
1151 _argvec[1] = (unsigned long)( arg1 ); \
1152 _argvec[2] = (unsigned long)( arg2 ); \
1153 _argvec[3] = (unsigned long)( arg3 ); \
1154 _argvec[4] = (unsigned long)( arg4 ); \
1155 _argvec[5] = (unsigned long)( arg5 ); \
1156 _argvec[6] = (unsigned long)( arg6 ); \
1157 _argvec[7] = (unsigned long)( arg7 ); \
1158 _argvec[8] = (unsigned long)( arg8 ); \
1159 _argvec[9] = (unsigned long)( arg9 ); \
1160 _argvec[10] = (unsigned long)( arg10 ); \
1161 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $8, %%esp\n\t" \
1162 "pushl 40(%%eax)\n\t" \
1163 "pushl 36(%%eax)\n\t" \
1164 "pushl 32(%%eax)\n\t" \
1165 "pushl 28(%%eax)\n\t" \
1166 "pushl 24(%%eax)\n\t" \
1167 "pushl 20(%%eax)\n\t" \
1168 "pushl 16(%%eax)\n\t" \
1169 "pushl 12(%%eax)\n\t" \
1170 "pushl 8(%%eax)\n\t" \
1171 "pushl 4(%%eax)\n\t" \
1172 "movl (%%eax), %%eax\n\t" \
1173 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1175 : "a"( &_argvec[0] ) \
1176 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
1177 lval = (__typeof__( lval ))_res; \
1180 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
1182 volatile OrigFn _orig = ( orig ); \
1183 volatile unsigned long _argvec[12]; \
1184 volatile unsigned long _res; \
1185 _argvec[0] = (unsigned long)_orig.nraddr; \
1186 _argvec[1] = (unsigned long)( arg1 ); \
1187 _argvec[2] = (unsigned long)( arg2 ); \
1188 _argvec[3] = (unsigned long)( arg3 ); \
1189 _argvec[4] = (unsigned long)( arg4 ); \
1190 _argvec[5] = (unsigned long)( arg5 ); \
1191 _argvec[6] = (unsigned long)( arg6 ); \
1192 _argvec[7] = (unsigned long)( arg7 ); \
1193 _argvec[8] = (unsigned long)( arg8 ); \
1194 _argvec[9] = (unsigned long)( arg9 ); \
1195 _argvec[10] = (unsigned long)( arg10 ); \
1196 _argvec[11] = (unsigned long)( arg11 ); \
1197 __asm__ volatile( VALGRIND_ALIGN_STACK "subl $4, %%esp\n\t" \
1198 "pushl 44(%%eax)\n\t" \
1199 "pushl 40(%%eax)\n\t" \
1200 "pushl 36(%%eax)\n\t" \
1201 "pushl 32(%%eax)\n\t" \
1202 "pushl 28(%%eax)\n\t" \
1203 "pushl 24(%%eax)\n\t" \
1204 "pushl 20(%%eax)\n\t" \
1205 "pushl 16(%%eax)\n\t" \
1206 "pushl 12(%%eax)\n\t" \
1207 "pushl 8(%%eax)\n\t" \
1208 "pushl 4(%%eax)\n\t" \
1209 "movl (%%eax), %%eax\n\t" \
1210 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1212 : "a"( &_argvec[0] ) \
1213 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
1214 lval = (__typeof__( lval ))_res; \
1217 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
1219 volatile OrigFn _orig = ( orig ); \
1220 volatile unsigned long _argvec[13]; \
1221 volatile unsigned long _res; \
1222 _argvec[0] = (unsigned long)_orig.nraddr; \
1223 _argvec[1] = (unsigned long)( arg1 ); \
1224 _argvec[2] = (unsigned long)( arg2 ); \
1225 _argvec[3] = (unsigned long)( arg3 ); \
1226 _argvec[4] = (unsigned long)( arg4 ); \
1227 _argvec[5] = (unsigned long)( arg5 ); \
1228 _argvec[6] = (unsigned long)( arg6 ); \
1229 _argvec[7] = (unsigned long)( arg7 ); \
1230 _argvec[8] = (unsigned long)( arg8 ); \
1231 _argvec[9] = (unsigned long)( arg9 ); \
1232 _argvec[10] = (unsigned long)( arg10 ); \
1233 _argvec[11] = (unsigned long)( arg11 ); \
1234 _argvec[12] = (unsigned long)( arg12 ); \
1235 __asm__ volatile( VALGRIND_ALIGN_STACK "pushl 48(%%eax)\n\t" \
1236 "pushl 44(%%eax)\n\t" \
1237 "pushl 40(%%eax)\n\t" \
1238 "pushl 36(%%eax)\n\t" \
1239 "pushl 32(%%eax)\n\t" \
1240 "pushl 28(%%eax)\n\t" \
1241 "pushl 24(%%eax)\n\t" \
1242 "pushl 20(%%eax)\n\t" \
1243 "pushl 16(%%eax)\n\t" \
1244 "pushl 12(%%eax)\n\t" \
1245 "pushl 8(%%eax)\n\t" \
1246 "pushl 4(%%eax)\n\t" \
1247 "movl (%%eax), %%eax\n\t" \
1248 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1250 : "a"( &_argvec[0] ) \
1251 : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
1252 lval = (__typeof__( lval ))_res; \
1259 #if defined( PLAT_amd64_linux ) || defined( PLAT_amd64_darwin )
1264 # define __CALLER_SAVED_REGS "rcx", "rdx", "rsi", "rdi", "r8", "r9", "r10", "r11"
1320 # if defined( __GNUC__ ) && defined( __GCC_HAVE_DWARF2_CFI_ASM )
1321 # define __FRAME_POINTER , "r"( __builtin_dwarf_cfa() )
1322 # define VALGRIND_CFI_PROLOGUE \
1323 "movq %%rbp, %%r15\n\t" \
1324 "movq %2, %%rbp\n\t" \
1325 ".cfi_remember_state\n\t" \
1326 ".cfi_def_cfa rbp, 0\n\t"
1327 # define VALGRIND_CFI_EPILOGUE \
1328 "movq %%r15, %%rbp\n\t" \
1329 ".cfi_restore_state\n\t"
1331 # define __FRAME_POINTER
1332 # define VALGRIND_CFI_PROLOGUE
1333 # define VALGRIND_CFI_EPILOGUE
1341 # define VALGRIND_ALIGN_STACK \
1342 "movq %%rsp,%%r14\n\t" \
1343 "andq $0xfffffffffffffff0,%%rsp\n\t"
1344 # define VALGRIND_RESTORE_STACK "movq %%r14,%%rsp\n\t"
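/* In the amd64 CALL_FN_* macros below, the "subq $128,%%rsp" before loading the
 * argument registers keeps the call from trampling the caller's 128-byte red
 * zone (the area below %rsp that the ELF ABI lets leaf code use); the original
 * stack pointer, saved in %r14, is put back by VALGRIND_RESTORE_STACK. */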
1370 # define CALL_FN_W_v( lval, orig ) \
1372 volatile OrigFn _orig = ( orig ); \
1373 volatile unsigned long _argvec[1]; \
1374 volatile unsigned long _res; \
1375 _argvec[0] = (unsigned long)_orig.nraddr; \
1376 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1377 "movq (%%rax), %%rax\n\t" \
1378 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1380 : "a"(&_argvec[0])__FRAME_POINTER \
1381 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1382 lval = (__typeof__( lval ))_res; \
1385 # define CALL_FN_W_W( lval, orig, arg1 ) \
1387 volatile OrigFn _orig = ( orig ); \
1388 volatile unsigned long _argvec[2]; \
1389 volatile unsigned long _res; \
1390 _argvec[0] = (unsigned long)_orig.nraddr; \
1391 _argvec[1] = (unsigned long)( arg1 ); \
1392 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1393 "movq 8(%%rax), %%rdi\n\t" \
1394 "movq (%%rax), %%rax\n\t" \
1395 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1397 : "a"(&_argvec[0])__FRAME_POINTER \
1398 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1399 lval = (__typeof__( lval ))_res; \
1402 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
1404 volatile OrigFn _orig = ( orig ); \
1405 volatile unsigned long _argvec[3]; \
1406 volatile unsigned long _res; \
1407 _argvec[0] = (unsigned long)_orig.nraddr; \
1408 _argvec[1] = (unsigned long)( arg1 ); \
1409 _argvec[2] = (unsigned long)( arg2 ); \
1410 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1411 "movq 16(%%rax), %%rsi\n\t" \
1412 "movq 8(%%rax), %%rdi\n\t" \
1413 "movq (%%rax), %%rax\n\t" \
1414 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1416 : "a"(&_argvec[0])__FRAME_POINTER \
1417 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1418 lval = (__typeof__( lval ))_res; \
1421 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
1423 volatile OrigFn _orig = ( orig ); \
1424 volatile unsigned long _argvec[4]; \
1425 volatile unsigned long _res; \
1426 _argvec[0] = (unsigned long)_orig.nraddr; \
1427 _argvec[1] = (unsigned long)( arg1 ); \
1428 _argvec[2] = (unsigned long)( arg2 ); \
1429 _argvec[3] = (unsigned long)( arg3 ); \
1430 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1431 "movq 24(%%rax), %%rdx\n\t" \
1432 "movq 16(%%rax), %%rsi\n\t" \
1433 "movq 8(%%rax), %%rdi\n\t" \
1434 "movq (%%rax), %%rax\n\t" \
1435 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1437 : "a"(&_argvec[0])__FRAME_POINTER \
1438 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1439 lval = (__typeof__( lval ))_res; \
1442 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
1444 volatile OrigFn _orig = ( orig ); \
1445 volatile unsigned long _argvec[5]; \
1446 volatile unsigned long _res; \
1447 _argvec[0] = (unsigned long)_orig.nraddr; \
1448 _argvec[1] = (unsigned long)( arg1 ); \
1449 _argvec[2] = (unsigned long)( arg2 ); \
1450 _argvec[3] = (unsigned long)( arg3 ); \
1451 _argvec[4] = (unsigned long)( arg4 ); \
1452 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1453 "movq 32(%%rax), %%rcx\n\t" \
1454 "movq 24(%%rax), %%rdx\n\t" \
1455 "movq 16(%%rax), %%rsi\n\t" \
1456 "movq 8(%%rax), %%rdi\n\t" \
1457 "movq (%%rax), %%rax\n\t" \
1458 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1460 : "a"(&_argvec[0])__FRAME_POINTER \
1461 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1462 lval = (__typeof__( lval ))_res; \
1465 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
1467 volatile OrigFn _orig = ( orig ); \
1468 volatile unsigned long _argvec[6]; \
1469 volatile unsigned long _res; \
1470 _argvec[0] = (unsigned long)_orig.nraddr; \
1471 _argvec[1] = (unsigned long)( arg1 ); \
1472 _argvec[2] = (unsigned long)( arg2 ); \
1473 _argvec[3] = (unsigned long)( arg3 ); \
1474 _argvec[4] = (unsigned long)( arg4 ); \
1475 _argvec[5] = (unsigned long)( arg5 ); \
1476 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1477 "movq 40(%%rax), %%r8\n\t" \
1478 "movq 32(%%rax), %%rcx\n\t" \
1479 "movq 24(%%rax), %%rdx\n\t" \
1480 "movq 16(%%rax), %%rsi\n\t" \
1481 "movq 8(%%rax), %%rdi\n\t" \
1482 "movq (%%rax), %%rax\n\t" \
1483 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1485 : "a"(&_argvec[0])__FRAME_POINTER \
1486 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1487 lval = (__typeof__( lval ))_res; \
1490 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
1492 volatile OrigFn _orig = ( orig ); \
1493 volatile unsigned long _argvec[7]; \
1494 volatile unsigned long _res; \
1495 _argvec[0] = (unsigned long)_orig.nraddr; \
1496 _argvec[1] = (unsigned long)( arg1 ); \
1497 _argvec[2] = (unsigned long)( arg2 ); \
1498 _argvec[3] = (unsigned long)( arg3 ); \
1499 _argvec[4] = (unsigned long)( arg4 ); \
1500 _argvec[5] = (unsigned long)( arg5 ); \
1501 _argvec[6] = (unsigned long)( arg6 ); \
1502 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1503 "movq 48(%%rax), %%r9\n\t" \
1504 "movq 40(%%rax), %%r8\n\t" \
1505 "movq 32(%%rax), %%rcx\n\t" \
1506 "movq 24(%%rax), %%rdx\n\t" \
1507 "movq 16(%%rax), %%rsi\n\t" \
1508 "movq 8(%%rax), %%rdi\n\t" \
1509 "movq (%%rax), %%rax\n\t" \
1510 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1512 : "a"(&_argvec[0])__FRAME_POINTER \
1513 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1514 lval = (__typeof__( lval ))_res; \
1517 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
1519 volatile OrigFn _orig = ( orig ); \
1520 volatile unsigned long _argvec[8]; \
1521 volatile unsigned long _res; \
1522 _argvec[0] = (unsigned long)_orig.nraddr; \
1523 _argvec[1] = (unsigned long)( arg1 ); \
1524 _argvec[2] = (unsigned long)( arg2 ); \
1525 _argvec[3] = (unsigned long)( arg3 ); \
1526 _argvec[4] = (unsigned long)( arg4 ); \
1527 _argvec[5] = (unsigned long)( arg5 ); \
1528 _argvec[6] = (unsigned long)( arg6 ); \
1529 _argvec[7] = (unsigned long)( arg7 ); \
1530 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $136,%%rsp\n\t" \
1531 "pushq 56(%%rax)\n\t" \
1532 "movq 48(%%rax), %%r9\n\t" \
1533 "movq 40(%%rax), %%r8\n\t" \
1534 "movq 32(%%rax), %%rcx\n\t" \
1535 "movq 24(%%rax), %%rdx\n\t" \
1536 "movq 16(%%rax), %%rsi\n\t" \
1537 "movq 8(%%rax), %%rdi\n\t" \
1538 "movq (%%rax), %%rax\n\t" \
1539 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1541 : "a"(&_argvec[0])__FRAME_POINTER \
1542 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1543 lval = (__typeof__( lval ))_res; \
1546 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
1548 volatile OrigFn _orig = ( orig ); \
1549 volatile unsigned long _argvec[9]; \
1550 volatile unsigned long _res; \
1551 _argvec[0] = (unsigned long)_orig.nraddr; \
1552 _argvec[1] = (unsigned long)( arg1 ); \
1553 _argvec[2] = (unsigned long)( arg2 ); \
1554 _argvec[3] = (unsigned long)( arg3 ); \
1555 _argvec[4] = (unsigned long)( arg4 ); \
1556 _argvec[5] = (unsigned long)( arg5 ); \
1557 _argvec[6] = (unsigned long)( arg6 ); \
1558 _argvec[7] = (unsigned long)( arg7 ); \
1559 _argvec[8] = (unsigned long)( arg8 ); \
1560 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1561 "pushq 64(%%rax)\n\t" \
1562 "pushq 56(%%rax)\n\t" \
1563 "movq 48(%%rax), %%r9\n\t" \
1564 "movq 40(%%rax), %%r8\n\t" \
1565 "movq 32(%%rax), %%rcx\n\t" \
1566 "movq 24(%%rax), %%rdx\n\t" \
1567 "movq 16(%%rax), %%rsi\n\t" \
1568 "movq 8(%%rax), %%rdi\n\t" \
1569 "movq (%%rax), %%rax\n\t" \
1570 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1572 : "a"(&_argvec[0])__FRAME_POINTER \
1573 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1574 lval = (__typeof__( lval ))_res; \
1577 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
1579 volatile OrigFn _orig = ( orig ); \
1580 volatile unsigned long _argvec[10]; \
1581 volatile unsigned long _res; \
1582 _argvec[0] = (unsigned long)_orig.nraddr; \
1583 _argvec[1] = (unsigned long)( arg1 ); \
1584 _argvec[2] = (unsigned long)( arg2 ); \
1585 _argvec[3] = (unsigned long)( arg3 ); \
1586 _argvec[4] = (unsigned long)( arg4 ); \
1587 _argvec[5] = (unsigned long)( arg5 ); \
1588 _argvec[6] = (unsigned long)( arg6 ); \
1589 _argvec[7] = (unsigned long)( arg7 ); \
1590 _argvec[8] = (unsigned long)( arg8 ); \
1591 _argvec[9] = (unsigned long)( arg9 ); \
1592 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $136,%%rsp\n\t" \
1593 "pushq 72(%%rax)\n\t" \
1594 "pushq 64(%%rax)\n\t" \
1595 "pushq 56(%%rax)\n\t" \
1596 "movq 48(%%rax), %%r9\n\t" \
1597 "movq 40(%%rax), %%r8\n\t" \
1598 "movq 32(%%rax), %%rcx\n\t" \
1599 "movq 24(%%rax), %%rdx\n\t" \
1600 "movq 16(%%rax), %%rsi\n\t" \
1601 "movq 8(%%rax), %%rdi\n\t" \
1602 "movq (%%rax), %%rax\n\t" \
1603 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1605 : "a"(&_argvec[0])__FRAME_POINTER \
1606 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1607 lval = (__typeof__( lval ))_res; \
1610 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
1612 volatile OrigFn _orig = ( orig ); \
1613 volatile unsigned long _argvec[11]; \
1614 volatile unsigned long _res; \
1615 _argvec[0] = (unsigned long)_orig.nraddr; \
1616 _argvec[1] = (unsigned long)( arg1 ); \
1617 _argvec[2] = (unsigned long)( arg2 ); \
1618 _argvec[3] = (unsigned long)( arg3 ); \
1619 _argvec[4] = (unsigned long)( arg4 ); \
1620 _argvec[5] = (unsigned long)( arg5 ); \
1621 _argvec[6] = (unsigned long)( arg6 ); \
1622 _argvec[7] = (unsigned long)( arg7 ); \
1623 _argvec[8] = (unsigned long)( arg8 ); \
1624 _argvec[9] = (unsigned long)( arg9 ); \
1625 _argvec[10] = (unsigned long)( arg10 ); \
1626 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1627 "pushq 80(%%rax)\n\t" \
1628 "pushq 72(%%rax)\n\t" \
1629 "pushq 64(%%rax)\n\t" \
1630 "pushq 56(%%rax)\n\t" \
1631 "movq 48(%%rax), %%r9\n\t" \
1632 "movq 40(%%rax), %%r8\n\t" \
1633 "movq 32(%%rax), %%rcx\n\t" \
1634 "movq 24(%%rax), %%rdx\n\t" \
1635 "movq 16(%%rax), %%rsi\n\t" \
1636 "movq 8(%%rax), %%rdi\n\t" \
1637 "movq (%%rax), %%rax\n\t" \
1638 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1640 : "a"(&_argvec[0])__FRAME_POINTER \
1641 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1642 lval = (__typeof__( lval ))_res; \
1645 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
1647 volatile OrigFn _orig = ( orig ); \
1648 volatile unsigned long _argvec[12]; \
1649 volatile unsigned long _res; \
1650 _argvec[0] = (unsigned long)_orig.nraddr; \
1651 _argvec[1] = (unsigned long)( arg1 ); \
1652 _argvec[2] = (unsigned long)( arg2 ); \
1653 _argvec[3] = (unsigned long)( arg3 ); \
1654 _argvec[4] = (unsigned long)( arg4 ); \
1655 _argvec[5] = (unsigned long)( arg5 ); \
1656 _argvec[6] = (unsigned long)( arg6 ); \
1657 _argvec[7] = (unsigned long)( arg7 ); \
1658 _argvec[8] = (unsigned long)( arg8 ); \
1659 _argvec[9] = (unsigned long)( arg9 ); \
1660 _argvec[10] = (unsigned long)( arg10 ); \
1661 _argvec[11] = (unsigned long)( arg11 ); \
1662 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $136,%%rsp\n\t" \
1663 "pushq 88(%%rax)\n\t" \
1664 "pushq 80(%%rax)\n\t" \
1665 "pushq 72(%%rax)\n\t" \
1666 "pushq 64(%%rax)\n\t" \
1667 "pushq 56(%%rax)\n\t" \
1668 "movq 48(%%rax), %%r9\n\t" \
1669 "movq 40(%%rax), %%r8\n\t" \
1670 "movq 32(%%rax), %%rcx\n\t" \
1671 "movq 24(%%rax), %%rdx\n\t" \
1672 "movq 16(%%rax), %%rsi\n\t" \
1673 "movq 8(%%rax), %%rdi\n\t" \
1674 "movq (%%rax), %%rax\n\t" \
1675 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1677 : "a"(&_argvec[0])__FRAME_POINTER \
1678 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1679 lval = (__typeof__( lval ))_res; \
1682 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
1684 volatile OrigFn _orig = ( orig ); \
1685 volatile unsigned long _argvec[13]; \
1686 volatile unsigned long _res; \
1687 _argvec[0] = (unsigned long)_orig.nraddr; \
1688 _argvec[1] = (unsigned long)( arg1 ); \
1689 _argvec[2] = (unsigned long)( arg2 ); \
1690 _argvec[3] = (unsigned long)( arg3 ); \
1691 _argvec[4] = (unsigned long)( arg4 ); \
1692 _argvec[5] = (unsigned long)( arg5 ); \
1693 _argvec[6] = (unsigned long)( arg6 ); \
1694 _argvec[7] = (unsigned long)( arg7 ); \
1695 _argvec[8] = (unsigned long)( arg8 ); \
1696 _argvec[9] = (unsigned long)( arg9 ); \
1697 _argvec[10] = (unsigned long)( arg10 ); \
1698 _argvec[11] = (unsigned long)( arg11 ); \
1699 _argvec[12] = (unsigned long)( arg12 ); \
1700 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1701 "pushq 96(%%rax)\n\t" \
1702 "pushq 88(%%rax)\n\t" \
1703 "pushq 80(%%rax)\n\t" \
1704 "pushq 72(%%rax)\n\t" \
1705 "pushq 64(%%rax)\n\t" \
1706 "pushq 56(%%rax)\n\t" \
1707 "movq 48(%%rax), %%r9\n\t" \
1708 "movq 40(%%rax), %%r8\n\t" \
1709 "movq 32(%%rax), %%rcx\n\t" \
1710 "movq 24(%%rax), %%rdx\n\t" \
1711 "movq 16(%%rax), %%rsi\n\t" \
1712 "movq 8(%%rax), %%rdi\n\t" \
1713 "movq (%%rax), %%rax\n\t" \
1714 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1716 : "a"(&_argvec[0])__FRAME_POINTER \
1717 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \
1718 lval = (__typeof__( lval ))_res; \
1725 #if defined( PLAT_ppc32_linux )
1751 # define __CALLER_SAVED_REGS \
1752 "lr", "ctr", "xer", "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", "r0", "r2", "r3", "r4", "r5", "r6", \
1753 "r7", "r8", "r9", "r10", "r11", "r12", "r13"
1760 # define VALGRIND_ALIGN_STACK \
1762 "rlwinm 1,1,0,0,27\n\t"
1763 # define VALGRIND_RESTORE_STACK "mr 1,28\n\t"
1768 # define CALL_FN_W_v( lval, orig ) \
1770 volatile OrigFn _orig = ( orig ); \
1771 volatile unsigned long _argvec[1]; \
1772 volatile unsigned long _res; \
1773 _argvec[0] = (unsigned long)_orig.nraddr; \
1774 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1775 "lwz 11,0(11)\n\t" \
1776 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1778 : "r"( &_argvec[0] ) \
1779 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1780 lval = (__typeof__( lval ))_res; \
1783 # define CALL_FN_W_W( lval, orig, arg1 ) \
1785 volatile OrigFn _orig = ( orig ); \
1786 volatile unsigned long _argvec[2]; \
1787 volatile unsigned long _res; \
1788 _argvec[0] = (unsigned long)_orig.nraddr; \
1789 _argvec[1] = (unsigned long)arg1; \
1790 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1792 "lwz 11,0(11)\n\t" \
1793 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1795 : "r"( &_argvec[0] ) \
1796 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1797 lval = (__typeof__( lval ))_res; \
1800 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
1802 volatile OrigFn _orig = ( orig ); \
1803 volatile unsigned long _argvec[3]; \
1804 volatile unsigned long _res; \
1805 _argvec[0] = (unsigned long)_orig.nraddr; \
1806 _argvec[1] = (unsigned long)arg1; \
1807 _argvec[2] = (unsigned long)arg2; \
1808 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1811 "lwz 11,0(11)\n\t" \
1812 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1814 : "r"( &_argvec[0] ) \
1815 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1816 lval = (__typeof__( lval ))_res; \
1819 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
1821 volatile OrigFn _orig = ( orig ); \
1822 volatile unsigned long _argvec[4]; \
1823 volatile unsigned long _res; \
1824 _argvec[0] = (unsigned long)_orig.nraddr; \
1825 _argvec[1] = (unsigned long)arg1; \
1826 _argvec[2] = (unsigned long)arg2; \
1827 _argvec[3] = (unsigned long)arg3; \
1828 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1831 "lwz 5,12(11)\n\t" \
1832 "lwz 11,0(11)\n\t" \
1833 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1835 : "r"( &_argvec[0] ) \
1836 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1837 lval = (__typeof__( lval ))_res; \
1840 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
1842 volatile OrigFn _orig = ( orig ); \
1843 volatile unsigned long _argvec[5]; \
1844 volatile unsigned long _res; \
1845 _argvec[0] = (unsigned long)_orig.nraddr; \
1846 _argvec[1] = (unsigned long)arg1; \
1847 _argvec[2] = (unsigned long)arg2; \
1848 _argvec[3] = (unsigned long)arg3; \
1849 _argvec[4] = (unsigned long)arg4; \
1850 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1853 "lwz 5,12(11)\n\t" \
1854 "lwz 6,16(11)\n\t" \
1855 "lwz 11,0(11)\n\t" \
1856 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1858 : "r"( &_argvec[0] ) \
1859 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1860 lval = (__typeof__( lval ))_res; \
1863 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
1865 volatile OrigFn _orig = ( orig ); \
1866 volatile unsigned long _argvec[6]; \
1867 volatile unsigned long _res; \
1868 _argvec[0] = (unsigned long)_orig.nraddr; \
1869 _argvec[1] = (unsigned long)arg1; \
1870 _argvec[2] = (unsigned long)arg2; \
1871 _argvec[3] = (unsigned long)arg3; \
1872 _argvec[4] = (unsigned long)arg4; \
1873 _argvec[5] = (unsigned long)arg5; \
1874 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1877 "lwz 5,12(11)\n\t" \
1878 "lwz 6,16(11)\n\t" \
1879 "lwz 7,20(11)\n\t" \
1880 "lwz 11,0(11)\n\t" \
1881 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1883 : "r"( &_argvec[0] ) \
1884 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1885 lval = (__typeof__( lval ))_res; \
1888 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
1890 volatile OrigFn _orig = ( orig ); \
1891 volatile unsigned long _argvec[7]; \
1892 volatile unsigned long _res; \
1893 _argvec[0] = (unsigned long)_orig.nraddr; \
1894 _argvec[1] = (unsigned long)arg1; \
1895 _argvec[2] = (unsigned long)arg2; \
1896 _argvec[3] = (unsigned long)arg3; \
1897 _argvec[4] = (unsigned long)arg4; \
1898 _argvec[5] = (unsigned long)arg5; \
1899 _argvec[6] = (unsigned long)arg6; \
1900 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1903 "lwz 5,12(11)\n\t" \
1904 "lwz 6,16(11)\n\t" \
1905 "lwz 7,20(11)\n\t" \
1906 "lwz 8,24(11)\n\t" \
1907 "lwz 11,0(11)\n\t" \
1908 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1910 : "r"( &_argvec[0] ) \
1911 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1912 lval = (__typeof__( lval ))_res; \
1915 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
1917 volatile OrigFn _orig = ( orig ); \
1918 volatile unsigned long _argvec[8]; \
1919 volatile unsigned long _res; \
1920 _argvec[0] = (unsigned long)_orig.nraddr; \
1921 _argvec[1] = (unsigned long)arg1; \
1922 _argvec[2] = (unsigned long)arg2; \
1923 _argvec[3] = (unsigned long)arg3; \
1924 _argvec[4] = (unsigned long)arg4; \
1925 _argvec[5] = (unsigned long)arg5; \
1926 _argvec[6] = (unsigned long)arg6; \
1927 _argvec[7] = (unsigned long)arg7; \
1928 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1931 "lwz 5,12(11)\n\t" \
1932 "lwz 6,16(11)\n\t" \
1933 "lwz 7,20(11)\n\t" \
1934 "lwz 8,24(11)\n\t" \
1935 "lwz 9,28(11)\n\t" \
1936 "lwz 11,0(11)\n\t" \
1937 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1939 : "r"( &_argvec[0] ) \
1940 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1941 lval = (__typeof__( lval ))_res; \
1944 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
1946 volatile OrigFn _orig = ( orig ); \
1947 volatile unsigned long _argvec[9]; \
1948 volatile unsigned long _res; \
1949 _argvec[0] = (unsigned long)_orig.nraddr; \
1950 _argvec[1] = (unsigned long)arg1; \
1951 _argvec[2] = (unsigned long)arg2; \
1952 _argvec[3] = (unsigned long)arg3; \
1953 _argvec[4] = (unsigned long)arg4; \
1954 _argvec[5] = (unsigned long)arg5; \
1955 _argvec[6] = (unsigned long)arg6; \
1956 _argvec[7] = (unsigned long)arg7; \
1957 _argvec[8] = (unsigned long)arg8; \
1958 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1961 "lwz 5,12(11)\n\t" \
1962 "lwz 6,16(11)\n\t" \
1963 "lwz 7,20(11)\n\t" \
1964 "lwz 8,24(11)\n\t" \
1965 "lwz 9,28(11)\n\t" \
1966 "lwz 10,32(11)\n\t" \
1967 "lwz 11,0(11)\n\t" \
1968 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1970 : "r"( &_argvec[0] ) \
1971 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
1972 lval = (__typeof__( lval ))_res; \
1975 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
1977 volatile OrigFn _orig = ( orig ); \
1978 volatile unsigned long _argvec[10]; \
1979 volatile unsigned long _res; \
1980 _argvec[0] = (unsigned long)_orig.nraddr; \
1981 _argvec[1] = (unsigned long)arg1; \
1982 _argvec[2] = (unsigned long)arg2; \
1983 _argvec[3] = (unsigned long)arg3; \
1984 _argvec[4] = (unsigned long)arg4; \
1985 _argvec[5] = (unsigned long)arg5; \
1986 _argvec[6] = (unsigned long)arg6; \
1987 _argvec[7] = (unsigned long)arg7; \
1988 _argvec[8] = (unsigned long)arg8; \
1989 _argvec[9] = (unsigned long)arg9; \
1990 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1991 "addi 1,1,-16\n\t" \
1992 "lwz 3,36(11)\n\t" \
1996 "lwz 5,12(11)\n\t" \
1997 "lwz 6,16(11)\n\t" \
1998 "lwz 7,20(11)\n\t" \
1999 "lwz 8,24(11)\n\t" \
2000 "lwz 9,28(11)\n\t" \
2001 "lwz 10,32(11)\n\t" \
2002 "lwz 11,0(11)\n\t" \
2003 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
2005 : "r"( &_argvec[0] ) \
2006 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2007 lval = (__typeof__( lval ))_res; \
2010 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
2012 volatile OrigFn _orig = ( orig ); \
2013 volatile unsigned long _argvec[11]; \
2014 volatile unsigned long _res; \
2015 _argvec[0] = (unsigned long)_orig.nraddr; \
2016 _argvec[1] = (unsigned long)arg1; \
2017 _argvec[2] = (unsigned long)arg2; \
2018 _argvec[3] = (unsigned long)arg3; \
2019 _argvec[4] = (unsigned long)arg4; \
2020 _argvec[5] = (unsigned long)arg5; \
2021 _argvec[6] = (unsigned long)arg6; \
2022 _argvec[7] = (unsigned long)arg7; \
2023 _argvec[8] = (unsigned long)arg8; \
2024 _argvec[9] = (unsigned long)arg9; \
2025 _argvec[10] = (unsigned long)arg10; \
2026 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2027 "addi 1,1,-16\n\t" \
2028 "lwz 3,40(11)\n\t" \
2030 "lwz 3,36(11)\n\t" \
2034 "lwz 5,12(11)\n\t" \
2035 "lwz 6,16(11)\n\t" \
2036 "lwz 7,20(11)\n\t" \
2037 "lwz 8,24(11)\n\t" \
2038 "lwz 9,28(11)\n\t" \
2039 "lwz 10,32(11)\n\t" \
2040 "lwz 11,0(11)\n\t" \
2041 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
2043 : "r"( &_argvec[0] ) \
2044 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2045 lval = (__typeof__( lval ))_res; \
2048 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
2050 volatile OrigFn _orig = ( orig ); \
2051 volatile unsigned long _argvec[12]; \
2052 volatile unsigned long _res; \
2053 _argvec[0] = (unsigned long)_orig.nraddr; \
2054 _argvec[1] = (unsigned long)arg1; \
2055 _argvec[2] = (unsigned long)arg2; \
2056 _argvec[3] = (unsigned long)arg3; \
2057 _argvec[4] = (unsigned long)arg4; \
2058 _argvec[5] = (unsigned long)arg5; \
2059 _argvec[6] = (unsigned long)arg6; \
2060 _argvec[7] = (unsigned long)arg7; \
2061 _argvec[8] = (unsigned long)arg8; \
2062 _argvec[9] = (unsigned long)arg9; \
2063 _argvec[10] = (unsigned long)arg10; \
2064 _argvec[11] = (unsigned long)arg11; \
2065 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2066 "addi 1,1,-32\n\t" \
2067 "lwz 3,44(11)\n\t" \
2069 "lwz 3,40(11)\n\t" \
2071 "lwz 3,36(11)\n\t" \
2075 "lwz 5,12(11)\n\t" \
2076 "lwz 6,16(11)\n\t" \
2077 "lwz 7,20(11)\n\t" \
2078 "lwz 8,24(11)\n\t" \
2079 "lwz 9,28(11)\n\t" \
2080 "lwz 10,32(11)\n\t" \
2081 "lwz 11,0(11)\n\t" \
2082 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
2084 : "r"( &_argvec[0] ) \
2085 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2086 lval = (__typeof__( lval ))_res; \
2089 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
2091 volatile OrigFn _orig = ( orig ); \
2092 volatile unsigned long _argvec[13]; \
2093 volatile unsigned long _res; \
2094 _argvec[0] = (unsigned long)_orig.nraddr; \
2095 _argvec[1] = (unsigned long)arg1; \
2096 _argvec[2] = (unsigned long)arg2; \
2097 _argvec[3] = (unsigned long)arg3; \
2098 _argvec[4] = (unsigned long)arg4; \
2099 _argvec[5] = (unsigned long)arg5; \
2100 _argvec[6] = (unsigned long)arg6; \
2101 _argvec[7] = (unsigned long)arg7; \
2102 _argvec[8] = (unsigned long)arg8; \
2103 _argvec[9] = (unsigned long)arg9; \
2104 _argvec[10] = (unsigned long)arg10; \
2105 _argvec[11] = (unsigned long)arg11; \
2106 _argvec[12] = (unsigned long)arg12; \
2107 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2108 "addi 1,1,-32\n\t" \
2109 "lwz 3,48(11)\n\t" \
2111 "lwz 3,44(11)\n\t" \
2113 "lwz 3,40(11)\n\t" \
2115 "lwz 3,36(11)\n\t" \
2119 "lwz 5,12(11)\n\t" \
2120 "lwz 6,16(11)\n\t" \
2121 "lwz 7,20(11)\n\t" \
2122 "lwz 8,24(11)\n\t" \
2123 "lwz 9,28(11)\n\t" \
2124 "lwz 10,32(11)\n\t" \
2125 "lwz 11,0(11)\n\t" \
2126 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
2128 : "r"( &_argvec[0] ) \
2129 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2130 lval = (__typeof__( lval ))_res; \
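/* --- Illustrative sketch, not part of valgrind.h ------------------------
   The CALL_FN_W_* macros above are normally invoked from inside a function
   wrapper, after VALGRIND_GET_ORIG_FN (defined elsewhere in this header)
   has captured the non-redirected entry point.  "foo" and the NONE soname
   in the sketch below are hypothetical placeholders.                     */

#include "valgrind.h"   /* in client/wrapper code, not inside this header */

int I_WRAP_SONAME_FNNAME_ZU( NONE, foo )( int x, int y )
{
    int    result;
    OrigFn fn;
    VALGRIND_GET_ORIG_FN( fn );        /* fetch foo's real (non-redirected) address */
    CALL_FN_W_WW( result, fn, x, y );  /* forward both word-sized arguments         */
    return result;
}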
2137 #if defined( PLAT_ppc64_linux )
2142 # define __CALLER_SAVED_REGS \
2143 "lr", "ctr", "xer", "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", "r0", "r2", "r3", "r4", "r5", "r6", \
2144 "r7", "r8", "r9", "r10", "r11", "r12", "r13"
2151 # define VALGRIND_ALIGN_STACK \
2153 "rldicr 1,1,0,59\n\t"
2154 # define VALGRIND_RESTORE_STACK "mr 1,28\n\t"
2159 # define CALL_FN_W_v( lval, orig ) \
2161 volatile OrigFn _orig = ( orig ); \
2162 volatile unsigned long _argvec[3 + 0]; \
2163 volatile unsigned long _res; \
2165 _argvec[1] = (unsigned long)_orig.r2; \
2166 _argvec[2] = (unsigned long)_orig.nraddr; \
2167 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2168 "std 2,-16(11)\n\t" \
2170 "ld 11, 0(11)\n\t" \
2171 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2173 "ld 2,-16(11)\n\t" \
2174 VALGRIND_RESTORE_STACK \
2176 : "r"( &_argvec[2] ) \
2177 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2178 lval = (__typeof__( lval ))_res; \
2181 # define CALL_FN_W_W( lval, orig, arg1 ) \
2183 volatile OrigFn _orig = ( orig ); \
2184 volatile unsigned long _argvec[3 + 1]; \
2185 volatile unsigned long _res; \
2187 _argvec[1] = (unsigned long)_orig.r2; \
2188 _argvec[2] = (unsigned long)_orig.nraddr; \
2189 _argvec[2 + 1] = (unsigned long)arg1; \
2190 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2191 "std 2,-16(11)\n\t" \
2194 "ld 11, 0(11)\n\t" \
2195 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2197 "ld 2,-16(11)\n\t" \
2198 VALGRIND_RESTORE_STACK \
2200 : "r"( &_argvec[2] ) \
2201 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2202 lval = (__typeof__( lval ))_res; \
2205 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
2207 volatile OrigFn _orig = ( orig ); \
2208 volatile unsigned long _argvec[3 + 2]; \
2209 volatile unsigned long _res; \
2211 _argvec[1] = (unsigned long)_orig.r2; \
2212 _argvec[2] = (unsigned long)_orig.nraddr; \
2213 _argvec[2 + 1] = (unsigned long)arg1; \
2214 _argvec[2 + 2] = (unsigned long)arg2; \
2215 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2216 "std 2,-16(11)\n\t" \
2219 "ld 4, 16(11)\n\t" \
2220 "ld 11, 0(11)\n\t" \
2221 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2223 "ld 2,-16(11)\n\t" \
2224 VALGRIND_RESTORE_STACK \
2226 : "r"( &_argvec[2] ) \
2227 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2228 lval = (__typeof__( lval ))_res; \
2231 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
2233 volatile OrigFn _orig = ( orig ); \
2234 volatile unsigned long _argvec[3 + 3]; \
2235 volatile unsigned long _res; \
2237 _argvec[1] = (unsigned long)_orig.r2; \
2238 _argvec[2] = (unsigned long)_orig.nraddr; \
2239 _argvec[2 + 1] = (unsigned long)arg1; \
2240 _argvec[2 + 2] = (unsigned long)arg2; \
2241 _argvec[2 + 3] = (unsigned long)arg3; \
2242 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2243 "std 2,-16(11)\n\t" \
2246 "ld 4, 16(11)\n\t" \
2247 "ld 5, 24(11)\n\t" \
2248 "ld 11, 0(11)\n\t" \
2249 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2251 "ld 2,-16(11)\n\t" \
2252 VALGRIND_RESTORE_STACK \
2254 : "r"( &_argvec[2] ) \
2255 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2256 lval = (__typeof__( lval ))_res; \
2259 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
2261 volatile OrigFn _orig = ( orig ); \
2262 volatile unsigned long _argvec[3 + 4]; \
2263 volatile unsigned long _res; \
2265 _argvec[1] = (unsigned long)_orig.r2; \
2266 _argvec[2] = (unsigned long)_orig.nraddr; \
2267 _argvec[2 + 1] = (unsigned long)arg1; \
2268 _argvec[2 + 2] = (unsigned long)arg2; \
2269 _argvec[2 + 3] = (unsigned long)arg3; \
2270 _argvec[2 + 4] = (unsigned long)arg4; \
2271 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2272 "std 2,-16(11)\n\t" \
2275 "ld 4, 16(11)\n\t" \
2276 "ld 5, 24(11)\n\t" \
2277 "ld 6, 32(11)\n\t" \
2278 "ld 11, 0(11)\n\t" \
2279 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2281 "ld 2,-16(11)\n\t" \
2282 VALGRIND_RESTORE_STACK \
2284 : "r"( &_argvec[2] ) \
2285 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2286 lval = (__typeof__( lval ))_res; \
2289 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
2291 volatile OrigFn _orig = ( orig ); \
2292 volatile unsigned long _argvec[3 + 5]; \
2293 volatile unsigned long _res; \
2295 _argvec[1] = (unsigned long)_orig.r2; \
2296 _argvec[2] = (unsigned long)_orig.nraddr; \
2297 _argvec[2 + 1] = (unsigned long)arg1; \
2298 _argvec[2 + 2] = (unsigned long)arg2; \
2299 _argvec[2 + 3] = (unsigned long)arg3; \
2300 _argvec[2 + 4] = (unsigned long)arg4; \
2301 _argvec[2 + 5] = (unsigned long)arg5; \
2302 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2303 "std 2,-16(11)\n\t" \
2306 "ld 4, 16(11)\n\t" \
2307 "ld 5, 24(11)\n\t" \
2308 "ld 6, 32(11)\n\t" \
2309 "ld 7, 40(11)\n\t" \
2310 "ld 11, 0(11)\n\t" \
2311 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2313 "ld 2,-16(11)\n\t" \
2314 VALGRIND_RESTORE_STACK \
2316 : "r"( &_argvec[2] ) \
2317 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2318 lval = (__typeof__( lval ))_res; \
2321 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
2323 volatile OrigFn _orig = ( orig ); \
2324 volatile unsigned long _argvec[3 + 6]; \
2325 volatile unsigned long _res; \
2327 _argvec[1] = (unsigned long)_orig.r2; \
2328 _argvec[2] = (unsigned long)_orig.nraddr; \
2329 _argvec[2 + 1] = (unsigned long)arg1; \
2330 _argvec[2 + 2] = (unsigned long)arg2; \
2331 _argvec[2 + 3] = (unsigned long)arg3; \
2332 _argvec[2 + 4] = (unsigned long)arg4; \
2333 _argvec[2 + 5] = (unsigned long)arg5; \
2334 _argvec[2 + 6] = (unsigned long)arg6; \
2335 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2336 "std 2,-16(11)\n\t" \
2339 "ld 4, 16(11)\n\t" \
2340 "ld 5, 24(11)\n\t" \
2341 "ld 6, 32(11)\n\t" \
2342 "ld 7, 40(11)\n\t" \
2343 "ld 8, 48(11)\n\t" \
2344 "ld 11, 0(11)\n\t" \
2345 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2347 "ld 2,-16(11)\n\t" \
2348 VALGRIND_RESTORE_STACK \
2350 : "r"( &_argvec[2] ) \
2351 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2352 lval = (__typeof__( lval ))_res; \
2355 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
2357 volatile OrigFn _orig = ( orig ); \
2358 volatile unsigned long _argvec[3 + 7]; \
2359 volatile unsigned long _res; \
2361 _argvec[1] = (unsigned long)_orig.r2; \
2362 _argvec[2] = (unsigned long)_orig.nraddr; \
2363 _argvec[2 + 1] = (unsigned long)arg1; \
2364 _argvec[2 + 2] = (unsigned long)arg2; \
2365 _argvec[2 + 3] = (unsigned long)arg3; \
2366 _argvec[2 + 4] = (unsigned long)arg4; \
2367 _argvec[2 + 5] = (unsigned long)arg5; \
2368 _argvec[2 + 6] = (unsigned long)arg6; \
2369 _argvec[2 + 7] = (unsigned long)arg7; \
2370 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2371 "std 2,-16(11)\n\t" \
2374 "ld 4, 16(11)\n\t" \
2375 "ld 5, 24(11)\n\t" \
2376 "ld 6, 32(11)\n\t" \
2377 "ld 7, 40(11)\n\t" \
2378 "ld 8, 48(11)\n\t" \
2379 "ld 9, 56(11)\n\t" \
2380 "ld 11, 0(11)\n\t" \
2381 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2383 "ld 2,-16(11)\n\t" \
2384 VALGRIND_RESTORE_STACK \
2386 : "r"( &_argvec[2] ) \
2387 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2388 lval = (__typeof__( lval ))_res; \
2391 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
2393 volatile OrigFn _orig = ( orig ); \
2394 volatile unsigned long _argvec[3 + 8]; \
2395 volatile unsigned long _res; \
2397 _argvec[1] = (unsigned long)_orig.r2; \
2398 _argvec[2] = (unsigned long)_orig.nraddr; \
2399 _argvec[2 + 1] = (unsigned long)arg1; \
2400 _argvec[2 + 2] = (unsigned long)arg2; \
2401 _argvec[2 + 3] = (unsigned long)arg3; \
2402 _argvec[2 + 4] = (unsigned long)arg4; \
2403 _argvec[2 + 5] = (unsigned long)arg5; \
2404 _argvec[2 + 6] = (unsigned long)arg6; \
2405 _argvec[2 + 7] = (unsigned long)arg7; \
2406 _argvec[2 + 8] = (unsigned long)arg8; \
2407 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2408 "std 2,-16(11)\n\t" \
2411 "ld 4, 16(11)\n\t" \
2412 "ld 5, 24(11)\n\t" \
2413 "ld 6, 32(11)\n\t" \
2414 "ld 7, 40(11)\n\t" \
2415 "ld 8, 48(11)\n\t" \
2416 "ld 9, 56(11)\n\t" \
2417 "ld 10, 64(11)\n\t" \
2418 "ld 11, 0(11)\n\t" \
2419 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2421 "ld 2,-16(11)\n\t" \
2422 VALGRIND_RESTORE_STACK \
2424 : "r"( &_argvec[2] ) \
2425 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2426 lval = (__typeof__( lval ))_res; \
2429 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
2431 volatile OrigFn _orig = ( orig ); \
2432 volatile unsigned long _argvec[3 + 9]; \
2433 volatile unsigned long _res; \
2435 _argvec[1] = (unsigned long)_orig.r2; \
2436 _argvec[2] = (unsigned long)_orig.nraddr; \
2437 _argvec[2 + 1] = (unsigned long)arg1; \
2438 _argvec[2 + 2] = (unsigned long)arg2; \
2439 _argvec[2 + 3] = (unsigned long)arg3; \
2440 _argvec[2 + 4] = (unsigned long)arg4; \
2441 _argvec[2 + 5] = (unsigned long)arg5; \
2442 _argvec[2 + 6] = (unsigned long)arg6; \
2443 _argvec[2 + 7] = (unsigned long)arg7; \
2444 _argvec[2 + 8] = (unsigned long)arg8; \
2445 _argvec[2 + 9] = (unsigned long)arg9; \
2446 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2447 "std 2,-16(11)\n\t" \
2449 "addi 1,1,-128\n\t" \
2451 "std 3,112(1)\n\t" \
2453 "ld 4, 16(11)\n\t" \
2454 "ld 5, 24(11)\n\t" \
2455 "ld 6, 32(11)\n\t" \
2456 "ld 7, 40(11)\n\t" \
2457 "ld 8, 48(11)\n\t" \
2458 "ld 9, 56(11)\n\t" \
2459 "ld 10, 64(11)\n\t" \
2460 "ld 11, 0(11)\n\t" \
2461 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2463 "ld 2,-16(11)\n\t" \
2464 VALGRIND_RESTORE_STACK \
2466 : "r"( &_argvec[2] ) \
2467 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2468 lval = (__typeof__( lval ))_res; \
2471 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
2473 volatile OrigFn _orig = ( orig ); \
2474 volatile unsigned long _argvec[3 + 10]; \
2475 volatile unsigned long _res; \
2477 _argvec[1] = (unsigned long)_orig.r2; \
2478 _argvec[2] = (unsigned long)_orig.nraddr; \
2479 _argvec[2 + 1] = (unsigned long)arg1; \
2480 _argvec[2 + 2] = (unsigned long)arg2; \
2481 _argvec[2 + 3] = (unsigned long)arg3; \
2482 _argvec[2 + 4] = (unsigned long)arg4; \
2483 _argvec[2 + 5] = (unsigned long)arg5; \
2484 _argvec[2 + 6] = (unsigned long)arg6; \
2485 _argvec[2 + 7] = (unsigned long)arg7; \
2486 _argvec[2 + 8] = (unsigned long)arg8; \
2487 _argvec[2 + 9] = (unsigned long)arg9; \
2488 _argvec[2 + 10] = (unsigned long)arg10; \
2489 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2490 "std 2,-16(11)\n\t" \
2492 "addi 1,1,-128\n\t" \
2494 "std 3,120(1)\n\t" \
2496 "std 3,112(1)\n\t" \
2498 "ld 4, 16(11)\n\t" \
2499 "ld 5, 24(11)\n\t" \
2500 "ld 6, 32(11)\n\t" \
2501 "ld 7, 40(11)\n\t" \
2502 "ld 8, 48(11)\n\t" \
2503 "ld 9, 56(11)\n\t" \
2504 "ld 10, 64(11)\n\t" \
2505 "ld 11, 0(11)\n\t" \
2506 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2508 "ld 2,-16(11)\n\t" \
2509 VALGRIND_RESTORE_STACK \
2511 : "r"( &_argvec[2] ) \
2512 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2513 lval = (__typeof__( lval ))_res; \
2516 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
2518 volatile OrigFn _orig = ( orig ); \
2519 volatile unsigned long _argvec[3 + 11]; \
2520 volatile unsigned long _res; \
2522 _argvec[1] = (unsigned long)_orig.r2; \
2523 _argvec[2] = (unsigned long)_orig.nraddr; \
2524 _argvec[2 + 1] = (unsigned long)arg1; \
2525 _argvec[2 + 2] = (unsigned long)arg2; \
2526 _argvec[2 + 3] = (unsigned long)arg3; \
2527 _argvec[2 + 4] = (unsigned long)arg4; \
2528 _argvec[2 + 5] = (unsigned long)arg5; \
2529 _argvec[2 + 6] = (unsigned long)arg6; \
2530 _argvec[2 + 7] = (unsigned long)arg7; \
2531 _argvec[2 + 8] = (unsigned long)arg8; \
2532 _argvec[2 + 9] = (unsigned long)arg9; \
2533 _argvec[2 + 10] = (unsigned long)arg10; \
2534 _argvec[2 + 11] = (unsigned long)arg11; \
2535 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2536 "std 2,-16(11)\n\t" \
2538 "addi 1,1,-144\n\t" \
2540 "std 3,128(1)\n\t" \
2542 "std 3,120(1)\n\t" \
2544 "std 3,112(1)\n\t" \
2546 "ld 4, 16(11)\n\t" \
2547 "ld 5, 24(11)\n\t" \
2548 "ld 6, 32(11)\n\t" \
2549 "ld 7, 40(11)\n\t" \
2550 "ld 8, 48(11)\n\t" \
2551 "ld 9, 56(11)\n\t" \
2552 "ld 10, 64(11)\n\t" \
2553 "ld 11, 0(11)\n\t" \
2554 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2556 "ld 2,-16(11)\n\t" \
2557 VALGRIND_RESTORE_STACK \
2559 : "r"( &_argvec[2] ) \
2560 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2561 lval = (__typeof__( lval ))_res; \
2564 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
2566 volatile OrigFn _orig = ( orig ); \
2567 volatile unsigned long _argvec[3 + 12]; \
2568 volatile unsigned long _res; \
2570 _argvec[1] = (unsigned long)_orig.r2; \
2571 _argvec[2] = (unsigned long)_orig.nraddr; \
2572 _argvec[2 + 1] = (unsigned long)arg1; \
2573 _argvec[2 + 2] = (unsigned long)arg2; \
2574 _argvec[2 + 3] = (unsigned long)arg3; \
2575 _argvec[2 + 4] = (unsigned long)arg4; \
2576 _argvec[2 + 5] = (unsigned long)arg5; \
2577 _argvec[2 + 6] = (unsigned long)arg6; \
2578 _argvec[2 + 7] = (unsigned long)arg7; \
2579 _argvec[2 + 8] = (unsigned long)arg8; \
2580 _argvec[2 + 9] = (unsigned long)arg9; \
2581 _argvec[2 + 10] = (unsigned long)arg10; \
2582 _argvec[2 + 11] = (unsigned long)arg11; \
2583 _argvec[2 + 12] = (unsigned long)arg12; \
2584 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2585 "std 2,-16(11)\n\t" \
2587 "addi 1,1,-144\n\t" \
2589 "std 3,136(1)\n\t" \
2591 "std 3,128(1)\n\t" \
2593 "std 3,120(1)\n\t" \
2595 "std 3,112(1)\n\t" \
2597 "ld 4, 16(11)\n\t" \
2598 "ld 5, 24(11)\n\t" \
2599 "ld 6, 32(11)\n\t" \
2600 "ld 7, 40(11)\n\t" \
2601 "ld 8, 48(11)\n\t" \
2602 "ld 9, 56(11)\n\t" \
2603 "ld 10, 64(11)\n\t" \
2604 "ld 11, 0(11)\n\t" \
2605 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2607 "ld 2,-16(11)\n\t" \
2608 VALGRIND_RESTORE_STACK \
2610 : "r"( &_argvec[2] ) \
2611 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \
2612 lval = (__typeof__( lval ))_res; \
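/* --- Illustrative sketch, not part of valgrind.h ------------------------
   On ppc64-linux the macros above also carry the callee's TOC pointer:
   _argvec[1] holds _orig.r2 and the asm saves/restores r2 around the call.
   A wrapper does not need to care about that; it uses the same pattern as
   on the other targets.  "gettick" and the Z-encoded soname
   "libfooZdsoZd1" (i.e. "libfoo.so.1") are hypothetical names used only
   for illustration.                                                      */

unsigned long I_WRAP_SONAME_FNNAME_ZU( libfooZdsoZd1, gettick )( void )
{
    unsigned long result;
    OrigFn        fn;
    VALGRIND_GET_ORIG_FN( fn );   /* nraddr plus, on ppc64, the callee's r2 */
    CALL_FN_W_v( result, fn );    /* zero arguments, word-sized result      */
    return result;
}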
2619 #if defined( PLAT_arm_linux )
2622 # define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3", "r4", "r14"
2637 # define VALGRIND_ALIGN_STACK \
2640 "bic r4, r4, #7\n\t" \
2642 # define VALGRIND_RESTORE_STACK "mov sp, r10\n\t"
2647 # define CALL_FN_W_v( lval, orig ) \
2649 volatile OrigFn _orig = ( orig ); \
2650 volatile unsigned long _argvec[1]; \
2651 volatile unsigned long _res; \
2652 _argvec[0] = (unsigned long)_orig.nraddr; \
2653 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r4, [%1] \n\t" \
2654 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \
2656 : "0"( &_argvec[0] ) \
2657 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2658 lval = (__typeof__( lval ))_res; \
2661 # define CALL_FN_W_W( lval, orig, arg1 ) \
2663 volatile OrigFn _orig = ( orig ); \
2664 volatile unsigned long _argvec[2]; \
2665 volatile unsigned long _res; \
2666 _argvec[0] = (unsigned long)_orig.nraddr; \
2667 _argvec[1] = (unsigned long)( arg1 ); \
2668 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \
2669 "ldr r4, [%1] \n\t" \
2670 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \
2672 : "0"( &_argvec[0] ) \
2673 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2674 lval = (__typeof__( lval ))_res; \
2677 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
2679 volatile OrigFn _orig = ( orig ); \
2680 volatile unsigned long _argvec[3]; \
2681 volatile unsigned long _res; \
2682 _argvec[0] = (unsigned long)_orig.nraddr; \
2683 _argvec[1] = (unsigned long)( arg1 ); \
2684 _argvec[2] = (unsigned long)( arg2 ); \
2685 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \
2686 "ldr r1, [%1, #8] \n\t" \
2687 "ldr r4, [%1] \n\t" \
2688 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \
2690 : "0"( &_argvec[0] ) \
2691 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2692 lval = (__typeof__( lval ))_res; \
2695 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
2697 volatile OrigFn _orig = ( orig ); \
2698 volatile unsigned long _argvec[4]; \
2699 volatile unsigned long _res; \
2700 _argvec[0] = (unsigned long)_orig.nraddr; \
2701 _argvec[1] = (unsigned long)( arg1 ); \
2702 _argvec[2] = (unsigned long)( arg2 ); \
2703 _argvec[3] = (unsigned long)( arg3 ); \
2704 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \
2705 "ldr r1, [%1, #8] \n\t" \
2706 "ldr r2, [%1, #12] \n\t" \
2707 "ldr r4, [%1] \n\t" \
2708 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \
2710 : "0"( &_argvec[0] ) \
2711 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2712 lval = (__typeof__( lval ))_res; \
2715 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
2717 volatile OrigFn _orig = ( orig ); \
2718 volatile unsigned long _argvec[5]; \
2719 volatile unsigned long _res; \
2720 _argvec[0] = (unsigned long)_orig.nraddr; \
2721 _argvec[1] = (unsigned long)( arg1 ); \
2722 _argvec[2] = (unsigned long)( arg2 ); \
2723 _argvec[3] = (unsigned long)( arg3 ); \
2724 _argvec[4] = (unsigned long)( arg4 ); \
2725 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \
2726 "ldr r1, [%1, #8] \n\t" \
2727 "ldr r2, [%1, #12] \n\t" \
2728 "ldr r3, [%1, #16] \n\t" \
2729 "ldr r4, [%1] \n\t" \
2730 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2732 : "0"( &_argvec[0] ) \
2733 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2734 lval = (__typeof__( lval ))_res; \
2737 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
2739 volatile OrigFn _orig = ( orig ); \
2740 volatile unsigned long _argvec[6]; \
2741 volatile unsigned long _res; \
2742 _argvec[0] = (unsigned long)_orig.nraddr; \
2743 _argvec[1] = (unsigned long)( arg1 ); \
2744 _argvec[2] = (unsigned long)( arg2 ); \
2745 _argvec[3] = (unsigned long)( arg3 ); \
2746 _argvec[4] = (unsigned long)( arg4 ); \
2747 _argvec[5] = (unsigned long)( arg5 ); \
2748 __asm__ volatile( VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \
2749 "ldr r0, [%1, #20] \n\t" \
2751 "ldr r0, [%1, #4] \n\t" \
2752 "ldr r1, [%1, #8] \n\t" \
2753 "ldr r2, [%1, #12] \n\t" \
2754 "ldr r3, [%1, #16] \n\t" \
2755 "ldr r4, [%1] \n\t" \
2756 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2758 : "0"( &_argvec[0] ) \
2759 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2760 lval = (__typeof__( lval ))_res; \
2763 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
2765 volatile OrigFn _orig = ( orig ); \
2766 volatile unsigned long _argvec[7]; \
2767 volatile unsigned long _res; \
2768 _argvec[0] = (unsigned long)_orig.nraddr; \
2769 _argvec[1] = (unsigned long)( arg1 ); \
2770 _argvec[2] = (unsigned long)( arg2 ); \
2771 _argvec[3] = (unsigned long)( arg3 ); \
2772 _argvec[4] = (unsigned long)( arg4 ); \
2773 _argvec[5] = (unsigned long)( arg5 ); \
2774 _argvec[6] = (unsigned long)( arg6 ); \
2775 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #20] \n\t" \
2776 "ldr r1, [%1, #24] \n\t" \
2777 "push {r0, r1} \n\t" \
2778 "ldr r0, [%1, #4] \n\t" \
2779 "ldr r1, [%1, #8] \n\t" \
2780 "ldr r2, [%1, #12] \n\t" \
2781 "ldr r3, [%1, #16] \n\t" \
2782 "ldr r4, [%1] \n\t" \
2783 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2785 : "0"( &_argvec[0] ) \
2786 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2787 lval = (__typeof__( lval ))_res; \
2790 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
2792 volatile OrigFn _orig = ( orig ); \
2793 volatile unsigned long _argvec[8]; \
2794 volatile unsigned long _res; \
2795 _argvec[0] = (unsigned long)_orig.nraddr; \
2796 _argvec[1] = (unsigned long)( arg1 ); \
2797 _argvec[2] = (unsigned long)( arg2 ); \
2798 _argvec[3] = (unsigned long)( arg3 ); \
2799 _argvec[4] = (unsigned long)( arg4 ); \
2800 _argvec[5] = (unsigned long)( arg5 ); \
2801 _argvec[6] = (unsigned long)( arg6 ); \
2802 _argvec[7] = (unsigned long)( arg7 ); \
2803 __asm__ volatile( VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \
2804 "ldr r0, [%1, #20] \n\t" \
2805 "ldr r1, [%1, #24] \n\t" \
2806 "ldr r2, [%1, #28] \n\t" \
2807 "push {r0, r1, r2} \n\t" \
2808 "ldr r0, [%1, #4] \n\t" \
2809 "ldr r1, [%1, #8] \n\t" \
2810 "ldr r2, [%1, #12] \n\t" \
2811 "ldr r3, [%1, #16] \n\t" \
2812 "ldr r4, [%1] \n\t" \
2813 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2815 : "0"( &_argvec[0] ) \
2816 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2817 lval = (__typeof__( lval ))_res; \
2820 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
2822 volatile OrigFn _orig = ( orig ); \
2823 volatile unsigned long _argvec[9]; \
2824 volatile unsigned long _res; \
2825 _argvec[0] = (unsigned long)_orig.nraddr; \
2826 _argvec[1] = (unsigned long)( arg1 ); \
2827 _argvec[2] = (unsigned long)( arg2 ); \
2828 _argvec[3] = (unsigned long)( arg3 ); \
2829 _argvec[4] = (unsigned long)( arg4 ); \
2830 _argvec[5] = (unsigned long)( arg5 ); \
2831 _argvec[6] = (unsigned long)( arg6 ); \
2832 _argvec[7] = (unsigned long)( arg7 ); \
2833 _argvec[8] = (unsigned long)( arg8 ); \
2834 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #20] \n\t" \
2835 "ldr r1, [%1, #24] \n\t" \
2836 "ldr r2, [%1, #28] \n\t" \
2837 "ldr r3, [%1, #32] \n\t" \
2838 "push {r0, r1, r2, r3} \n\t" \
2839 "ldr r0, [%1, #4] \n\t" \
2840 "ldr r1, [%1, #8] \n\t" \
2841 "ldr r2, [%1, #12] \n\t" \
2842 "ldr r3, [%1, #16] \n\t" \
2843 "ldr r4, [%1] \n\t" \
2844 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2846 : "0"( &_argvec[0] ) \
2847 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2848 lval = (__typeof__( lval ))_res; \
2851 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
2853 volatile OrigFn _orig = ( orig ); \
2854 volatile unsigned long _argvec[10]; \
2855 volatile unsigned long _res; \
2856 _argvec[0] = (unsigned long)_orig.nraddr; \
2857 _argvec[1] = (unsigned long)( arg1 ); \
2858 _argvec[2] = (unsigned long)( arg2 ); \
2859 _argvec[3] = (unsigned long)( arg3 ); \
2860 _argvec[4] = (unsigned long)( arg4 ); \
2861 _argvec[5] = (unsigned long)( arg5 ); \
2862 _argvec[6] = (unsigned long)( arg6 ); \
2863 _argvec[7] = (unsigned long)( arg7 ); \
2864 _argvec[8] = (unsigned long)( arg8 ); \
2865 _argvec[9] = (unsigned long)( arg9 ); \
2866 __asm__ volatile( VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \
2867 "ldr r0, [%1, #20] \n\t" \
2868 "ldr r1, [%1, #24] \n\t" \
2869 "ldr r2, [%1, #28] \n\t" \
2870 "ldr r3, [%1, #32] \n\t" \
2871 "ldr r4, [%1, #36] \n\t" \
2872 "push {r0, r1, r2, r3, r4} \n\t" \
2873 "ldr r0, [%1, #4] \n\t" \
2874 "ldr r1, [%1, #8] \n\t" \
2875 "ldr r2, [%1, #12] \n\t" \
2876 "ldr r3, [%1, #16] \n\t" \
2877 "ldr r4, [%1] \n\t" \
2878 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2880 : "0"( &_argvec[0] ) \
2881 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2882 lval = (__typeof__( lval ))_res; \
2885 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
2887 volatile OrigFn _orig = ( orig ); \
2888 volatile unsigned long _argvec[11]; \
2889 volatile unsigned long _res; \
2890 _argvec[0] = (unsigned long)_orig.nraddr; \
2891 _argvec[1] = (unsigned long)( arg1 ); \
2892 _argvec[2] = (unsigned long)( arg2 ); \
2893 _argvec[3] = (unsigned long)( arg3 ); \
2894 _argvec[4] = (unsigned long)( arg4 ); \
2895 _argvec[5] = (unsigned long)( arg5 ); \
2896 _argvec[6] = (unsigned long)( arg6 ); \
2897 _argvec[7] = (unsigned long)( arg7 ); \
2898 _argvec[8] = (unsigned long)( arg8 ); \
2899 _argvec[9] = (unsigned long)( arg9 ); \
2900 _argvec[10] = (unsigned long)( arg10 ); \
2901 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #40] \n\t" \
2903 "ldr r0, [%1, #20] \n\t" \
2904 "ldr r1, [%1, #24] \n\t" \
2905 "ldr r2, [%1, #28] \n\t" \
2906 "ldr r3, [%1, #32] \n\t" \
2907 "ldr r4, [%1, #36] \n\t" \
2908 "push {r0, r1, r2, r3, r4} \n\t" \
2909 "ldr r0, [%1, #4] \n\t" \
2910 "ldr r1, [%1, #8] \n\t" \
2911 "ldr r2, [%1, #12] \n\t" \
2912 "ldr r3, [%1, #16] \n\t" \
2913 "ldr r4, [%1] \n\t" \
2914 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2916 : "0"( &_argvec[0] ) \
2917 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2918 lval = (__typeof__( lval ))_res; \
2921 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
2923 volatile OrigFn _orig = ( orig ); \
2924 volatile unsigned long _argvec[12]; \
2925 volatile unsigned long _res; \
2926 _argvec[0] = (unsigned long)_orig.nraddr; \
2927 _argvec[1] = (unsigned long)( arg1 ); \
2928 _argvec[2] = (unsigned long)( arg2 ); \
2929 _argvec[3] = (unsigned long)( arg3 ); \
2930 _argvec[4] = (unsigned long)( arg4 ); \
2931 _argvec[5] = (unsigned long)( arg5 ); \
2932 _argvec[6] = (unsigned long)( arg6 ); \
2933 _argvec[7] = (unsigned long)( arg7 ); \
2934 _argvec[8] = (unsigned long)( arg8 ); \
2935 _argvec[9] = (unsigned long)( arg9 ); \
2936 _argvec[10] = (unsigned long)( arg10 ); \
2937 _argvec[11] = (unsigned long)( arg11 ); \
2938 __asm__ volatile( VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \
2939 "ldr r0, [%1, #40] \n\t" \
2940 "ldr r1, [%1, #44] \n\t" \
2941 "push {r0, r1} \n\t" \
2942 "ldr r0, [%1, #20] \n\t" \
2943 "ldr r1, [%1, #24] \n\t" \
2944 "ldr r2, [%1, #28] \n\t" \
2945 "ldr r3, [%1, #32] \n\t" \
2946 "ldr r4, [%1, #36] \n\t" \
2947 "push {r0, r1, r2, r3, r4} \n\t" \
2948 "ldr r0, [%1, #4] \n\t" \
2949 "ldr r1, [%1, #8] \n\t" \
2950 "ldr r2, [%1, #12] \n\t" \
2951 "ldr r3, [%1, #16] \n\t" \
2952 "ldr r4, [%1] \n\t" \
2953 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2955 : "0"( &_argvec[0] ) \
2956 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2957 lval = (__typeof__( lval ))_res; \
2960 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
2962 volatile OrigFn _orig = ( orig ); \
2963 volatile unsigned long _argvec[13]; \
2964 volatile unsigned long _res; \
2965 _argvec[0] = (unsigned long)_orig.nraddr; \
2966 _argvec[1] = (unsigned long)( arg1 ); \
2967 _argvec[2] = (unsigned long)( arg2 ); \
2968 _argvec[3] = (unsigned long)( arg3 ); \
2969 _argvec[4] = (unsigned long)( arg4 ); \
2970 _argvec[5] = (unsigned long)( arg5 ); \
2971 _argvec[6] = (unsigned long)( arg6 ); \
2972 _argvec[7] = (unsigned long)( arg7 ); \
2973 _argvec[8] = (unsigned long)( arg8 ); \
2974 _argvec[9] = (unsigned long)( arg9 ); \
2975 _argvec[10] = (unsigned long)( arg10 ); \
2976 _argvec[11] = (unsigned long)( arg11 ); \
2977 _argvec[12] = (unsigned long)( arg12 ); \
2978 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #40] \n\t" \
2979 "ldr r1, [%1, #44] \n\t" \
2980 "ldr r2, [%1, #48] \n\t" \
2981 "push {r0, r1, r2} \n\t" \
2982 "ldr r0, [%1, #20] \n\t" \
2983 "ldr r1, [%1, #24] \n\t" \
2984 "ldr r2, [%1, #28] \n\t" \
2985 "ldr r3, [%1, #32] \n\t" \
2986 "ldr r4, [%1, #36] \n\t" \
2987 "push {r0, r1, r2, r3, r4} \n\t" \
2988 "ldr r0, [%1, #4] \n\t" \
2989 "ldr r1, [%1, #8] \n\t" \
2990 "ldr r2, [%1, #12] \n\t" \
2991 "ldr r3, [%1, #16] \n\t" \
2992 "ldr r4, [%1] \n\t" \
2993 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2995 : "0"( &_argvec[0] ) \
2996 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \
2997 lval = (__typeof__( lval ))_res; \
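/* --- Illustrative sketch, not part of valgrind.h ------------------------
   The arm-linux variants above place the first four words in r0-r3 and
   push any remaining arguments onto a re-aligned stack, so a wrapper
   simply picks the CALL_FN_W_NW macro that matches the callee's arity.
   "blend5" and its parameters below are hypothetical.                    */

long I_WRAP_SONAME_FNNAME_ZU( NONE, blend5 )( long a, long b, long c,
                                              long d, long e )
{
    long   result;
    OrigFn fn;
    VALGRIND_GET_ORIG_FN( fn );
    CALL_FN_W_5W( result, fn, a, b, c, d, e );   /* arg5 is passed on the stack */
    return result;
}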
3004 #if defined( PLAT_s390x_linux )
3010 # if defined( __GNUC__ ) && defined( __GCC_HAVE_DWARF2_CFI_ASM )
3011 # define __FRAME_POINTER , "d"( __builtin_dwarf_cfa() )
3012 # define VALGRIND_CFI_PROLOGUE \
3013 ".cfi_remember_state\n\t" \
3017 ".cfi_def_cfa r11, 0\n\t"
3018 # define VALGRIND_CFI_EPILOGUE \
3020 ".cfi_restore_state\n\t"
3022 # define __FRAME_POINTER
3023 # define VALGRIND_CFI_PROLOGUE "lgr 1,%1\n\t"
3024 # define VALGRIND_CFI_EPILOGUE
3036 # define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
3047 # define CALL_FN_W_v( lval, orig ) \
3049 volatile OrigFn _orig = ( orig ); \
3050 volatile unsigned long _argvec[1]; \
3051 volatile unsigned long _res; \
3052 _argvec[0] = (unsigned long)_orig.nraddr; \
3053 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3055 VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3056 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3058 : "d"(&_argvec[0])__FRAME_POINTER \
3059 : "cc", "memory", __CALLER_SAVED_REGS, "7" ); \
3060 lval = (__typeof__( lval ))_res; \
3064 # define CALL_FN_W_W( lval, orig, arg1 ) \
3066 volatile OrigFn _orig = ( orig ); \
3067 volatile unsigned long _argvec[2]; \
3068 volatile unsigned long _res; \
3069 _argvec[0] = (unsigned long)_orig.nraddr; \
3070 _argvec[1] = (unsigned long)arg1; \
3071 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3073 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3074 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3076 : "a"(&_argvec[0])__FRAME_POINTER \
3077 : "cc", "memory", __CALLER_SAVED_REGS, "7" ); \
3078 lval = (__typeof__( lval ))_res; \
3081 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
3083 volatile OrigFn _orig = ( orig ); \
3084 volatile unsigned long _argvec[3]; \
3085 volatile unsigned long _res; \
3086 _argvec[0] = (unsigned long)_orig.nraddr; \
3087 _argvec[1] = (unsigned long)arg1; \
3088 _argvec[2] = (unsigned long)arg2; \
3089 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3092 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3093 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3095 : "a"(&_argvec[0])__FRAME_POINTER \
3096 : "cc", "memory", __CALLER_SAVED_REGS, "7" ); \
3097 lval = (__typeof__( lval ))_res; \
3100 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
3102 volatile OrigFn _orig = ( orig ); \
3103 volatile unsigned long _argvec[4]; \
3104 volatile unsigned long _res; \
3105 _argvec[0] = (unsigned long)_orig.nraddr; \
3106 _argvec[1] = (unsigned long)arg1; \
3107 _argvec[2] = (unsigned long)arg2; \
3108 _argvec[3] = (unsigned long)arg3; \
3109 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3113 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3114 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3116 : "a"(&_argvec[0])__FRAME_POINTER \
3117 : "cc", "memory", __CALLER_SAVED_REGS, "7" ); \
3118 lval = (__typeof__( lval ))_res; \
3121 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
3123 volatile OrigFn _orig = ( orig ); \
3124 volatile unsigned long _argvec[5]; \
3125 volatile unsigned long _res; \
3126 _argvec[0] = (unsigned long)_orig.nraddr; \
3127 _argvec[1] = (unsigned long)arg1; \
3128 _argvec[2] = (unsigned long)arg2; \
3129 _argvec[3] = (unsigned long)arg3; \
3130 _argvec[4] = (unsigned long)arg4; \
3131 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3136 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3137 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3139 : "a"(&_argvec[0])__FRAME_POINTER \
3140 : "cc", "memory", __CALLER_SAVED_REGS, "7" ); \
3141 lval = (__typeof__( lval ))_res; \
3144 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
3146 volatile OrigFn _orig = ( orig ); \
3147 volatile unsigned long _argvec[6]; \
3148 volatile unsigned long _res; \
3149 _argvec[0] = (unsigned long)_orig.nraddr; \
3150 _argvec[1] = (unsigned long)arg1; \
3151 _argvec[2] = (unsigned long)arg2; \
3152 _argvec[3] = (unsigned long)arg3; \
3153 _argvec[4] = (unsigned long)arg4; \
3154 _argvec[5] = (unsigned long)arg5; \
3155 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3161 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3162 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3164 : "a"(&_argvec[0])__FRAME_POINTER \
3165 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3166 lval = (__typeof__( lval ))_res; \
3169 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
3171 volatile OrigFn _orig = ( orig ); \
3172 volatile unsigned long _argvec[7]; \
3173 volatile unsigned long _res; \
3174 _argvec[0] = (unsigned long)_orig.nraddr; \
3175 _argvec[1] = (unsigned long)arg1; \
3176 _argvec[2] = (unsigned long)arg2; \
3177 _argvec[3] = (unsigned long)arg3; \
3178 _argvec[4] = (unsigned long)arg4; \
3179 _argvec[5] = (unsigned long)arg5; \
3180 _argvec[6] = (unsigned long)arg6; \
3181 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-168\n\t" \
3187 "mvc 160(8,15), 48(1)\n\t" \
3188 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3189 "aghi 15,168\n\t" VALGRIND_CFI_EPILOGUE \
3191 : "a"(&_argvec[0])__FRAME_POINTER \
3192 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3193 lval = (__typeof__( lval ))_res; \
3196 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
3198 volatile OrigFn _orig = ( orig ); \
3199 volatile unsigned long _argvec[8]; \
3200 volatile unsigned long _res; \
3201 _argvec[0] = (unsigned long)_orig.nraddr; \
3202 _argvec[1] = (unsigned long)arg1; \
3203 _argvec[2] = (unsigned long)arg2; \
3204 _argvec[3] = (unsigned long)arg3; \
3205 _argvec[4] = (unsigned long)arg4; \
3206 _argvec[5] = (unsigned long)arg5; \
3207 _argvec[6] = (unsigned long)arg6; \
3208 _argvec[7] = (unsigned long)arg7; \
3209 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-176\n\t" \
3215 "mvc 160(8,15), 48(1)\n\t" \
3216 "mvc 168(8,15), 56(1)\n\t" \
3217 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3218 "aghi 15,176\n\t" VALGRIND_CFI_EPILOGUE \
3220 : "a"(&_argvec[0])__FRAME_POINTER \
3221 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3222 lval = (__typeof__( lval ))_res; \
3225 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
3227 volatile OrigFn _orig = ( orig ); \
3228 volatile unsigned long _argvec[9]; \
3229 volatile unsigned long _res; \
3230 _argvec[0] = (unsigned long)_orig.nraddr; \
3231 _argvec[1] = (unsigned long)arg1; \
3232 _argvec[2] = (unsigned long)arg2; \
3233 _argvec[3] = (unsigned long)arg3; \
3234 _argvec[4] = (unsigned long)arg4; \
3235 _argvec[5] = (unsigned long)arg5; \
3236 _argvec[6] = (unsigned long)arg6; \
3237 _argvec[7] = (unsigned long)arg7; \
3238 _argvec[8] = (unsigned long)arg8; \
3239 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-184\n\t" \
3245 "mvc 160(8,15), 48(1)\n\t" \
3246 "mvc 168(8,15), 56(1)\n\t" \
3247 "mvc 176(8,15), 64(1)\n\t" \
3248 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3249 "aghi 15,184\n\t" VALGRIND_CFI_EPILOGUE \
3251 : "a"(&_argvec[0])__FRAME_POINTER \
3252 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3253 lval = (__typeof__( lval ))_res; \
3256 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
3258 volatile OrigFn _orig = ( orig ); \
3259 volatile unsigned long _argvec[10]; \
3260 volatile unsigned long _res; \
3261 _argvec[0] = (unsigned long)_orig.nraddr; \
3262 _argvec[1] = (unsigned long)arg1; \
3263 _argvec[2] = (unsigned long)arg2; \
3264 _argvec[3] = (unsigned long)arg3; \
3265 _argvec[4] = (unsigned long)arg4; \
3266 _argvec[5] = (unsigned long)arg5; \
3267 _argvec[6] = (unsigned long)arg6; \
3268 _argvec[7] = (unsigned long)arg7; \
3269 _argvec[8] = (unsigned long)arg8; \
3270 _argvec[9] = (unsigned long)arg9; \
3271 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-192\n\t" \
3277 "mvc 160(8,15), 48(1)\n\t" \
3278 "mvc 168(8,15), 56(1)\n\t" \
3279 "mvc 176(8,15), 64(1)\n\t" \
3280 "mvc 184(8,15), 72(1)\n\t" \
3281 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3282 "aghi 15,192\n\t" VALGRIND_CFI_EPILOGUE \
3284 : "a"(&_argvec[0])__FRAME_POINTER \
3285 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3286 lval = (__typeof__( lval ))_res; \
3289 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
3291 volatile OrigFn _orig = ( orig ); \
3292 volatile unsigned long _argvec[11]; \
3293 volatile unsigned long _res; \
3294 _argvec[0] = (unsigned long)_orig.nraddr; \
3295 _argvec[1] = (unsigned long)arg1; \
3296 _argvec[2] = (unsigned long)arg2; \
3297 _argvec[3] = (unsigned long)arg3; \
3298 _argvec[4] = (unsigned long)arg4; \
3299 _argvec[5] = (unsigned long)arg5; \
3300 _argvec[6] = (unsigned long)arg6; \
3301 _argvec[7] = (unsigned long)arg7; \
3302 _argvec[8] = (unsigned long)arg8; \
3303 _argvec[9] = (unsigned long)arg9; \
3304 _argvec[10] = (unsigned long)arg10; \
3305 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-200\n\t" \
3311 "mvc 160(8,15), 48(1)\n\t" \
3312 "mvc 168(8,15), 56(1)\n\t" \
3313 "mvc 176(8,15), 64(1)\n\t" \
3314 "mvc 184(8,15), 72(1)\n\t" \
3315 "mvc 192(8,15), 80(1)\n\t" \
3316 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3317 "aghi 15,200\n\t" VALGRIND_CFI_EPILOGUE \
3319 : "a"(&_argvec[0])__FRAME_POINTER \
3320 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3321 lval = (__typeof__( lval ))_res; \
3324 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
3326 volatile OrigFn _orig = ( orig ); \
3327 volatile unsigned long _argvec[12]; \
3328 volatile unsigned long _res; \
3329 _argvec[0] = (unsigned long)_orig.nraddr; \
3330 _argvec[1] = (unsigned long)arg1; \
3331 _argvec[2] = (unsigned long)arg2; \
3332 _argvec[3] = (unsigned long)arg3; \
3333 _argvec[4] = (unsigned long)arg4; \
3334 _argvec[5] = (unsigned long)arg5; \
3335 _argvec[6] = (unsigned long)arg6; \
3336 _argvec[7] = (unsigned long)arg7; \
3337 _argvec[8] = (unsigned long)arg8; \
3338 _argvec[9] = (unsigned long)arg9; \
3339 _argvec[10] = (unsigned long)arg10; \
3340 _argvec[11] = (unsigned long)arg11; \
3341 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-208\n\t" \
3347 "mvc 160(8,15), 48(1)\n\t" \
3348 "mvc 168(8,15), 56(1)\n\t" \
3349 "mvc 176(8,15), 64(1)\n\t" \
3350 "mvc 184(8,15), 72(1)\n\t" \
3351 "mvc 192(8,15), 80(1)\n\t" \
3352 "mvc 200(8,15), 88(1)\n\t" \
3353 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3354 "aghi 15,208\n\t" VALGRIND_CFI_EPILOGUE \
3356 : "a"(&_argvec[0])__FRAME_POINTER \
3357 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3358 lval = (__typeof__( lval ))_res; \
3361 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
3363 volatile OrigFn _orig = ( orig ); \
3364 volatile unsigned long _argvec[13]; \
3365 volatile unsigned long _res; \
3366 _argvec[0] = (unsigned long)_orig.nraddr; \
3367 _argvec[1] = (unsigned long)arg1; \
3368 _argvec[2] = (unsigned long)arg2; \
3369 _argvec[3] = (unsigned long)arg3; \
3370 _argvec[4] = (unsigned long)arg4; \
3371 _argvec[5] = (unsigned long)arg5; \
3372 _argvec[6] = (unsigned long)arg6; \
3373 _argvec[7] = (unsigned long)arg7; \
3374 _argvec[8] = (unsigned long)arg8; \
3375 _argvec[9] = (unsigned long)arg9; \
3376 _argvec[10] = (unsigned long)arg10; \
3377 _argvec[11] = (unsigned long)arg11; \
3378 _argvec[12] = (unsigned long)arg12; \
3379 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-216\n\t" \
3385 "mvc 160(8,15), 48(1)\n\t" \
3386 "mvc 168(8,15), 56(1)\n\t" \
3387 "mvc 176(8,15), 64(1)\n\t" \
3388 "mvc 184(8,15), 72(1)\n\t" \
3389 "mvc 192(8,15), 80(1)\n\t" \
3390 "mvc 200(8,15), 88(1)\n\t" \
3391 "mvc 208(8,15), 96(1)\n\t" \
3392 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3393 "aghi 15,216\n\t" VALGRIND_CFI_EPILOGUE \
3395 : "a"(&_argvec[0])__FRAME_POINTER \
3396 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \
3397 lval = (__typeof__( lval ))_res; \
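/* --- Illustrative sketch, not part of valgrind.h ------------------------
   The s390x-linux variants above return the callee's result from r2
   ("lgr %0, 2").  When the wrapped function returns void, the macro still
   needs an lvalue for that (ignored) result, so a throwaway unsigned long
   can be used.  "notify" and its parameter below are hypothetical.       */

void I_WRAP_SONAME_FNNAME_ZU( NONE, notify )( void* event )
{
    volatile unsigned long _junk;   /* receives the ignored return value */
    OrigFn                 fn;
    VALGRIND_GET_ORIG_FN( fn );
    CALL_FN_W_W( _junk, fn, event );
    (void)_junk;
}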
3404 #if defined( PLAT_mips32_linux )
3407 # define __CALLER_SAVED_REGS \
3408 "$2", "$3", "$4", "$5", "$6", "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", "$25", "$31"
3413 # define CALL_FN_W_v( lval, orig ) \
3415 volatile OrigFn _orig = ( orig ); \
3416 volatile unsigned long _argvec[1]; \
3417 volatile unsigned long _res; \
3418 _argvec[0] = (unsigned long)_orig.nraddr; \
3419 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3420 "sw $28, 0($29) \n\t" \
3421 "sw $31, 4($29) \n\t" \
3422 "subu $29, $29, 16 \n\t" \
3423 "lw $25, 0(%1) \n\t" \
3424 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16\n\t" \
3425 "lw $28, 0($29) \n\t" \
3426 "lw $31, 4($29) \n\t" \
3427 "addu $29, $29, 8 \n\t" \
3430 : "0"( &_argvec[0] ) \
3431 : "memory", __CALLER_SAVED_REGS ); \
3432 lval = (__typeof__( lval ))_res; \
3435 # define CALL_FN_W_W( lval, orig, arg1 ) \
3437 volatile OrigFn _orig = ( orig ); \
3438 volatile unsigned long _argvec[2]; \
3439 volatile unsigned long _res; \
3440 _argvec[0] = (unsigned long)_orig.nraddr; \
3441 _argvec[1] = (unsigned long)( arg1 ); \
3442 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3443 "sw $28, 0($29) \n\t" \
3444 "sw $31, 4($29) \n\t" \
3445 "subu $29, $29, 16 \n\t" \
3446 "lw $4, 4(%1) \n\t" \
3447 "lw $25, 0(%1) \n\t" \
3448 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \
3449 "lw $28, 0($29) \n\t" \
3450 "lw $31, 4($29) \n\t" \
3451 "addu $29, $29, 8 \n\t" \
3454 : "0"( &_argvec[0] ) \
3455 : "memory", __CALLER_SAVED_REGS ); \
3456 lval = (__typeof__( lval ))_res; \
3459 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
3461 volatile OrigFn _orig = ( orig ); \
3462 volatile unsigned long _argvec[3]; \
3463 volatile unsigned long _res; \
3464 _argvec[0] = (unsigned long)_orig.nraddr; \
3465 _argvec[1] = (unsigned long)( arg1 ); \
3466 _argvec[2] = (unsigned long)( arg2 ); \
3467 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3468 "sw $28, 0($29) \n\t" \
3469 "sw $31, 4($29) \n\t" \
3470 "subu $29, $29, 16 \n\t" \
3471 "lw $4, 4(%1) \n\t" \
3472 "lw $5, 8(%1) \n\t" \
3473 "lw $25, 0(%1) \n\t" \
3474 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \
3475 "lw $28, 0($29) \n\t" \
3476 "lw $31, 4($29) \n\t" \
3477 "addu $29, $29, 8 \n\t" \
3480 : "0"( &_argvec[0] ) \
3481 : "memory", __CALLER_SAVED_REGS ); \
3482 lval = (__typeof__( lval ))_res; \
3485 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
3487 volatile OrigFn _orig = ( orig ); \
3488 volatile unsigned long _argvec[4]; \
3489 volatile unsigned long _res; \
3490 _argvec[0] = (unsigned long)_orig.nraddr; \
3491 _argvec[1] = (unsigned long)( arg1 ); \
3492 _argvec[2] = (unsigned long)( arg2 ); \
3493 _argvec[3] = (unsigned long)( arg3 ); \
3494 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3495 "sw $28, 0($29) \n\t" \
3496 "sw $31, 4($29) \n\t" \
3497 "subu $29, $29, 16 \n\t" \
3498 "lw $4, 4(%1) \n\t" \
3499 "lw $5, 8(%1) \n\t" \
3500 "lw $6, 12(%1) \n\t" \
3501 "lw $25, 0(%1) \n\t" \
3502 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \
3503 "lw $28, 0($29) \n\t" \
3504 "lw $31, 4($29) \n\t" \
3505 "addu $29, $29, 8 \n\t" \
3508 : "0"( &_argvec[0] ) \
3509 : "memory", __CALLER_SAVED_REGS ); \
3510 lval = (__typeof__( lval ))_res; \
3513 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
3515 volatile OrigFn _orig = ( orig ); \
3516 volatile unsigned long _argvec[5]; \
3517 volatile unsigned long _res; \
3518 _argvec[0] = (unsigned long)_orig.nraddr; \
3519 _argvec[1] = (unsigned long)( arg1 ); \
3520 _argvec[2] = (unsigned long)( arg2 ); \
3521 _argvec[3] = (unsigned long)( arg3 ); \
3522 _argvec[4] = (unsigned long)( arg4 ); \
3523 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3524 "sw $28, 0($29) \n\t" \
3525 "sw $31, 4($29) \n\t" \
3526 "subu $29, $29, 16 \n\t" \
3527 "lw $4, 4(%1) \n\t" \
3528 "lw $5, 8(%1) \n\t" \
3529 "lw $6, 12(%1) \n\t" \
3530 "lw $7, 16(%1) \n\t" \
3531 "lw $25, 0(%1) \n\t" \
3532 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \
3533 "lw $28, 0($29) \n\t" \
3534 "lw $31, 4($29) \n\t" \
3535 "addu $29, $29, 8 \n\t" \
3538 : "0"( &_argvec[0] ) \
3539 : "memory", __CALLER_SAVED_REGS ); \
3540 lval = (__typeof__( lval ))_res; \
3543 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
3545 volatile OrigFn _orig = ( orig ); \
3546 volatile unsigned long _argvec[6]; \
3547 volatile unsigned long _res; \
3548 _argvec[0] = (unsigned long)_orig.nraddr; \
3549 _argvec[1] = (unsigned long)( arg1 ); \
3550 _argvec[2] = (unsigned long)( arg2 ); \
3551 _argvec[3] = (unsigned long)( arg3 ); \
3552 _argvec[4] = (unsigned long)( arg4 ); \
3553 _argvec[5] = (unsigned long)( arg5 ); \
3554 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3555 "sw $28, 0($29) \n\t" \
3556 "sw $31, 4($29) \n\t" \
3557 "lw $4, 20(%1) \n\t" \
3558 "subu $29, $29, 24\n\t" \
3559 "sw $4, 16($29) \n\t" \
3560 "lw $4, 4(%1) \n\t" \
3561 "lw $5, 8(%1) \n\t" \
3562 "lw $6, 12(%1) \n\t" \
3563 "lw $7, 16(%1) \n\t" \
3564 "lw $25, 0(%1) \n\t" \
3565 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 24 \n\t" \
3566 "lw $28, 0($29) \n\t" \
3567 "lw $31, 4($29) \n\t" \
3568 "addu $29, $29, 8 \n\t" \
3571 : "0"( &_argvec[0] ) \
3572 : "memory", __CALLER_SAVED_REGS ); \
3573 lval = (__typeof__( lval ))_res; \
3575 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
3577 volatile OrigFn _orig = ( orig ); \
3578 volatile unsigned long _argvec[7]; \
3579 volatile unsigned long _res; \
3580 _argvec[0] = (unsigned long)_orig.nraddr; \
3581 _argvec[1] = (unsigned long)( arg1 ); \
3582 _argvec[2] = (unsigned long)( arg2 ); \
3583 _argvec[3] = (unsigned long)( arg3 ); \
3584 _argvec[4] = (unsigned long)( arg4 ); \
3585 _argvec[5] = (unsigned long)( arg5 ); \
3586 _argvec[6] = (unsigned long)( arg6 ); \
3587 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3588 "sw $28, 0($29) \n\t" \
3589 "sw $31, 4($29) \n\t" \
3590 "lw $4, 20(%1) \n\t" \
3591 "subu $29, $29, 32\n\t" \
3592 "sw $4, 16($29) \n\t" \
3593 "lw $4, 24(%1) \n\t" \
3595 "sw $4, 20($29) \n\t" \
3596 "lw $4, 4(%1) \n\t" \
3597 "lw $5, 8(%1) \n\t" \
3598 "lw $6, 12(%1) \n\t" \
3599 "lw $7, 16(%1) \n\t" \
3600 "lw $25, 0(%1) \n\t" \
3601 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 32 \n\t" \
3602 "lw $28, 0($29) \n\t" \
3603 "lw $31, 4($29) \n\t" \
3604 "addu $29, $29, 8 \n\t" \
3607 : "0"( &_argvec[0] ) \
3608 : "memory", __CALLER_SAVED_REGS ); \
3609 lval = (__typeof__( lval ))_res; \
3612 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
3614 volatile OrigFn _orig = ( orig ); \
3615 volatile unsigned long _argvec[8]; \
3616 volatile unsigned long _res; \
3617 _argvec[0] = (unsigned long)_orig.nraddr; \
3618 _argvec[1] = (unsigned long)( arg1 ); \
3619 _argvec[2] = (unsigned long)( arg2 ); \
3620 _argvec[3] = (unsigned long)( arg3 ); \
3621 _argvec[4] = (unsigned long)( arg4 ); \
3622 _argvec[5] = (unsigned long)( arg5 ); \
3623 _argvec[6] = (unsigned long)( arg6 ); \
3624 _argvec[7] = (unsigned long)( arg7 ); \
3625 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3626 "sw $28, 0($29) \n\t" \
3627 "sw $31, 4($29) \n\t" \
3628 "lw $4, 20(%1) \n\t" \
3629 "subu $29, $29, 32\n\t" \
3630 "sw $4, 16($29) \n\t" \
3631 "lw $4, 24(%1) \n\t" \
3632 "sw $4, 20($29) \n\t" \
3633 "lw $4, 28(%1) \n\t" \
3634 "sw $4, 24($29) \n\t" \
3635 "lw $4, 4(%1) \n\t" \
3636 "lw $5, 8(%1) \n\t" \
3637 "lw $6, 12(%1) \n\t" \
3638 "lw $7, 16(%1) \n\t" \
3639 "lw $25, 0(%1) \n\t" \
3640 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 32 \n\t" \
3641 "lw $28, 0($29) \n\t" \
3642 "lw $31, 4($29) \n\t" \
3643 "addu $29, $29, 8 \n\t" \
3646 : "0"( &_argvec[0] ) \
3647 : "memory", __CALLER_SAVED_REGS ); \
3648 lval = (__typeof__( lval ))_res; \
3651 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
3653 volatile OrigFn _orig = ( orig ); \
3654 volatile unsigned long _argvec[9]; \
3655 volatile unsigned long _res; \
3656 _argvec[0] = (unsigned long)_orig.nraddr; \
3657 _argvec[1] = (unsigned long)( arg1 ); \
3658 _argvec[2] = (unsigned long)( arg2 ); \
3659 _argvec[3] = (unsigned long)( arg3 ); \
3660 _argvec[4] = (unsigned long)( arg4 ); \
3661 _argvec[5] = (unsigned long)( arg5 ); \
3662 _argvec[6] = (unsigned long)( arg6 ); \
3663 _argvec[7] = (unsigned long)( arg7 ); \
3664 _argvec[8] = (unsigned long)( arg8 ); \
3665 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3666 "sw $28, 0($29) \n\t" \
3667 "sw $31, 4($29) \n\t" \
3668 "lw $4, 20(%1) \n\t" \
3669 "subu $29, $29, 40\n\t" \
3670 "sw $4, 16($29) \n\t" \
3671 "lw $4, 24(%1) \n\t" \
3672 "sw $4, 20($29) \n\t" \
3673 "lw $4, 28(%1) \n\t" \
3674 "sw $4, 24($29) \n\t" \
3675 "lw $4, 32(%1) \n\t" \
3676 "sw $4, 28($29) \n\t" \
3677 "lw $4, 4(%1) \n\t" \
3678 "lw $5, 8(%1) \n\t" \
3679 "lw $6, 12(%1) \n\t" \
3680 "lw $7, 16(%1) \n\t" \
3681 "lw $25, 0(%1) \n\t" \
3682 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 40 \n\t" \
3683 "lw $28, 0($29) \n\t" \
3684 "lw $31, 4($29) \n\t" \
3685 "addu $29, $29, 8 \n\t" \
3688 : "0"( &_argvec[0] ) \
3689 : "memory", __CALLER_SAVED_REGS ); \
3690 lval = (__typeof__( lval ))_res; \
3693 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
3695 volatile OrigFn _orig = ( orig ); \
3696 volatile unsigned long _argvec[10]; \
3697 volatile unsigned long _res; \
3698 _argvec[0] = (unsigned long)_orig.nraddr; \
3699 _argvec[1] = (unsigned long)( arg1 ); \
3700 _argvec[2] = (unsigned long)( arg2 ); \
3701 _argvec[3] = (unsigned long)( arg3 ); \
3702 _argvec[4] = (unsigned long)( arg4 ); \
3703 _argvec[5] = (unsigned long)( arg5 ); \
3704 _argvec[6] = (unsigned long)( arg6 ); \
3705 _argvec[7] = (unsigned long)( arg7 ); \
3706 _argvec[8] = (unsigned long)( arg8 ); \
3707 _argvec[9] = (unsigned long)( arg9 ); \
3708 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3709 "sw $28, 0($29) \n\t" \
3710 "sw $31, 4($29) \n\t" \
3711 "lw $4, 20(%1) \n\t" \
3712 "subu $29, $29, 40\n\t" \
3713 "sw $4, 16($29) \n\t" \
3714 "lw $4, 24(%1) \n\t" \
3715 "sw $4, 20($29) \n\t" \
3716 "lw $4, 28(%1) \n\t" \
3717 "sw $4, 24($29) \n\t" \
3718 "lw $4, 32(%1) \n\t" \
3719 "sw $4, 28($29) \n\t" \
3720 "lw $4, 36(%1) \n\t" \
3721 "sw $4, 32($29) \n\t" \
3722 "lw $4, 4(%1) \n\t" \
3723 "lw $5, 8(%1) \n\t" \
3724 "lw $6, 12(%1) \n\t" \
3725 "lw $7, 16(%1) \n\t" \
3726 "lw $25, 0(%1) \n\t" \
3727 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 40 \n\t" \
3728 "lw $28, 0($29) \n\t" \
3729 "lw $31, 4($29) \n\t" \
3730 "addu $29, $29, 8 \n\t" \
3733 : "0"( &_argvec[0] ) \
3734 : "memory", __CALLER_SAVED_REGS ); \
3735 lval = (__typeof__( lval ))_res; \
3738 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
3740 volatile OrigFn _orig = ( orig ); \
3741 volatile unsigned long _argvec[11]; \
3742 volatile unsigned long _res; \
3743 _argvec[0] = (unsigned long)_orig.nraddr; \
3744 _argvec[1] = (unsigned long)( arg1 ); \
3745 _argvec[2] = (unsigned long)( arg2 ); \
3746 _argvec[3] = (unsigned long)( arg3 ); \
3747 _argvec[4] = (unsigned long)( arg4 ); \
3748 _argvec[5] = (unsigned long)( arg5 ); \
3749 _argvec[6] = (unsigned long)( arg6 ); \
3750 _argvec[7] = (unsigned long)( arg7 ); \
3751 _argvec[8] = (unsigned long)( arg8 ); \
3752 _argvec[9] = (unsigned long)( arg9 ); \
3753 _argvec[10] = (unsigned long)( arg10 ); \
3754 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3755 "sw $28, 0($29) \n\t" \
3756 "sw $31, 4($29) \n\t" \
3757 "lw $4, 20(%1) \n\t" \
3758 "subu $29, $29, 48\n\t" \
3759 "sw $4, 16($29) \n\t" \
3760 "lw $4, 24(%1) \n\t" \
3761 "sw $4, 20($29) \n\t" \
3762 "lw $4, 28(%1) \n\t" \
3763 "sw $4, 24($29) \n\t" \
3764 "lw $4, 32(%1) \n\t" \
3765 "sw $4, 28($29) \n\t" \
3766 "lw $4, 36(%1) \n\t" \
3767 "sw $4, 32($29) \n\t" \
3768 "lw $4, 40(%1) \n\t" \
3769 "sw $4, 36($29) \n\t" \
3770 "lw $4, 4(%1) \n\t" \
3771 "lw $5, 8(%1) \n\t" \
3772 "lw $6, 12(%1) \n\t" \
3773 "lw $7, 16(%1) \n\t" \
3774 "lw $25, 0(%1) \n\t" \
3775 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 48 \n\t" \
3776 "lw $28, 0($29) \n\t" \
3777 "lw $31, 4($29) \n\t" \
3778 "addu $29, $29, 8 \n\t" \
3781 : "0"( &_argvec[0] ) \
3782 : "memory", __CALLER_SAVED_REGS ); \
3783 lval = (__typeof__( lval ))_res; \
3786 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
3788 volatile OrigFn _orig = ( orig ); \
3789 volatile unsigned long _argvec[12]; \
3790 volatile unsigned long _res; \
3791 _argvec[0] = (unsigned long)_orig.nraddr; \
3792 _argvec[1] = (unsigned long)( arg1 ); \
3793 _argvec[2] = (unsigned long)( arg2 ); \
3794 _argvec[3] = (unsigned long)( arg3 ); \
3795 _argvec[4] = (unsigned long)( arg4 ); \
3796 _argvec[5] = (unsigned long)( arg5 ); \
3797 _argvec[6] = (unsigned long)( arg6 ); \
3798 _argvec[7] = (unsigned long)( arg7 ); \
3799 _argvec[8] = (unsigned long)( arg8 ); \
3800 _argvec[9] = (unsigned long)( arg9 ); \
3801 _argvec[10] = (unsigned long)( arg10 ); \
3802 _argvec[11] = (unsigned long)( arg11 ); \
3803 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3804 "sw $28, 0($29) \n\t" \
3805 "sw $31, 4($29) \n\t" \
3806 "lw $4, 20(%1) \n\t" \
3807 "subu $29, $29, 48\n\t" \
3808 "sw $4, 16($29) \n\t" \
3809 "lw $4, 24(%1) \n\t" \
3810 "sw $4, 20($29) \n\t" \
3811 "lw $4, 28(%1) \n\t" \
3812 "sw $4, 24($29) \n\t" \
3813 "lw $4, 32(%1) \n\t" \
3814 "sw $4, 28($29) \n\t" \
3815 "lw $4, 36(%1) \n\t" \
3816 "sw $4, 32($29) \n\t" \
3817 "lw $4, 40(%1) \n\t" \
3818 "sw $4, 36($29) \n\t" \
3819 "lw $4, 44(%1) \n\t" \
3820 "sw $4, 40($29) \n\t" \
3821 "lw $4, 4(%1) \n\t" \
3822 "lw $5, 8(%1) \n\t" \
3823 "lw $6, 12(%1) \n\t" \
3824 "lw $7, 16(%1) \n\t" \
3825 "lw $25, 0(%1) \n\t" \
3826 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 48 \n\t" \
3827 "lw $28, 0($29) \n\t" \
3828 "lw $31, 4($29) \n\t" \
3829 "addu $29, $29, 8 \n\t" \
3832 : "0"( &_argvec[0] ) \
3833 : "memory", __CALLER_SAVED_REGS ); \
3834 lval = (__typeof__( lval ))_res; \
3837 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
3839 volatile OrigFn _orig = ( orig ); \
3840 volatile unsigned long _argvec[13]; \
3841 volatile unsigned long _res; \
3842 _argvec[0] = (unsigned long)_orig.nraddr; \
3843 _argvec[1] = (unsigned long)( arg1 ); \
3844 _argvec[2] = (unsigned long)( arg2 ); \
3845 _argvec[3] = (unsigned long)( arg3 ); \
3846 _argvec[4] = (unsigned long)( arg4 ); \
3847 _argvec[5] = (unsigned long)( arg5 ); \
3848 _argvec[6] = (unsigned long)( arg6 ); \
3849 _argvec[7] = (unsigned long)( arg7 ); \
3850 _argvec[8] = (unsigned long)( arg8 ); \
3851 _argvec[9] = (unsigned long)( arg9 ); \
3852 _argvec[10] = (unsigned long)( arg10 ); \
3853 _argvec[11] = (unsigned long)( arg11 ); \
3854 _argvec[12] = (unsigned long)( arg12 ); \
3855 __asm__ volatile( "subu $29, $29, 8 \n\t" \
3856 "sw $28, 0($29) \n\t" \
3857 "sw $31, 4($29) \n\t" \
3858 "lw $4, 20(%1) \n\t" \
3859 "subu $29, $29, 56\n\t" \
3860 "sw $4, 16($29) \n\t" \
3861 "lw $4, 24(%1) \n\t" \
3862 "sw $4, 20($29) \n\t" \
3863 "lw $4, 28(%1) \n\t" \
3864 "sw $4, 24($29) \n\t" \
3865 "lw $4, 32(%1) \n\t" \
3866 "sw $4, 28($29) \n\t" \
3867 "lw $4, 36(%1) \n\t" \
3868 "sw $4, 32($29) \n\t" \
3869 "lw $4, 40(%1) \n\t" \
3870 "sw $4, 36($29) \n\t" \
3871 "lw $4, 44(%1) \n\t" \
3872 "sw $4, 40($29) \n\t" \
3873 "lw $4, 48(%1) \n\t" \
3874 "sw $4, 44($29) \n\t" \
3875 "lw $4, 4(%1) \n\t" \
3876 "lw $5, 8(%1) \n\t" \
3877 "lw $6, 12(%1) \n\t" \
3878 "lw $7, 16(%1) \n\t" \
3879 "lw $25, 0(%1) \n\t" \
3880 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 56 \n\t" \
3881 "lw $28, 0($29) \n\t" \
3882 "lw $31, 4($29) \n\t" \
3883 "addu $29, $29, 8 \n\t" \
3886 : "r"( &_argvec[0] ) \
3887 : "memory", __CALLER_SAVED_REGS ); \
3888 lval = (__typeof__( lval ))_res; \
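/* --- Illustrative sketch, not part of valgrind.h ------------------------
   The mips32-linux variants above save $gp/$ra, reserve the o32 argument
   area and load the callee into $25 (t9) themselves, so wrapper code looks
   exactly the same as on the other platforms.  "mix3" below is a
   hypothetical three-argument function.                                  */

int I_WRAP_SONAME_FNNAME_ZU( NONE, mix3 )( int a, int b, int c )
{
    int    result;
    OrigFn fn;
    VALGRIND_GET_ORIG_FN( fn );
    CALL_FN_W_WWW( result, fn, a, b, c );
    return result;
}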
3895 #if defined( PLAT_mips64_linux )
3898 # define __CALLER_SAVED_REGS \
3899 "$2", "$3", "$4", "$5", "$6", "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", "$25", "$31"
3904 # define CALL_FN_W_v( lval, orig ) \
3906 volatile OrigFn _orig = ( orig ); \
3907 volatile unsigned long _argvec[1]; \
3908 volatile unsigned long _res; \
3909 _argvec[0] = (unsigned long)_orig.nraddr; \
3910 __asm__ volatile( "ld $25, 0(%1)\n\t" \
3911 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
3913 : "0"( &_argvec[0] ) \
3914 : "memory", __CALLER_SAVED_REGS ); \
3915 lval = (__typeof__( lval ))_res; \
3918 # define CALL_FN_W_W( lval, orig, arg1 ) \
3920 volatile OrigFn _orig = ( orig ); \
3921 volatile unsigned long _argvec[2]; \
3922 volatile unsigned long _res; \
3923 _argvec[0] = (unsigned long)_orig.nraddr; \
3924 _argvec[1] = (unsigned long)( arg1 ); \
3925 __asm__ volatile( "ld $4, 8(%1)\n\t" \
3926 "ld $25, 0(%1)\n\t" \
3927 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
3929 : "r"( &_argvec[0] ) \
3930 : "memory", __CALLER_SAVED_REGS ); \
3931 lval = (__typeof__( lval ))_res; \
3934 # define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
3936 volatile OrigFn _orig = ( orig ); \
3937 volatile unsigned long _argvec[3]; \
3938 volatile unsigned long _res; \
3939 _argvec[0] = (unsigned long)_orig.nraddr; \
3940 _argvec[1] = (unsigned long)( arg1 ); \
3941 _argvec[2] = (unsigned long)( arg2 ); \
3942 __asm__ volatile( "ld $4, 8(%1)\n\t" \
3943 "ld $5, 16(%1)\n\t" \
3944 "ld $25, 0(%1)\n\t" \
3945 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
3947 : "r"( &_argvec[0] ) \
3948 : "memory", __CALLER_SAVED_REGS ); \
3949 lval = (__typeof__( lval ))_res; \
3952 # define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
3954 volatile OrigFn _orig = ( orig ); \
3955 volatile unsigned long _argvec[4]; \
3956 volatile unsigned long _res; \
3957 _argvec[0] = (unsigned long)_orig.nraddr; \
3958 _argvec[1] = (unsigned long)( arg1 ); \
3959 _argvec[2] = (unsigned long)( arg2 ); \
3960 _argvec[3] = (unsigned long)( arg3 ); \
3961 __asm__ volatile( "ld $4, 8(%1)\n\t" \
3962 "ld $5, 16(%1)\n\t" \
3963 "ld $6, 24(%1)\n\t" \
3964 "ld $25, 0(%1)\n\t" \
3965 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
3967 : "r"( &_argvec[0] ) \
3968 : "memory", __CALLER_SAVED_REGS ); \
3969 lval = (__typeof__( lval ))_res; \
3972 # define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
3974 volatile OrigFn _orig = ( orig ); \
3975 volatile unsigned long _argvec[5]; \
3976 volatile unsigned long _res; \
3977 _argvec[0] = (unsigned long)_orig.nraddr; \
3978 _argvec[1] = (unsigned long)( arg1 ); \
3979 _argvec[2] = (unsigned long)( arg2 ); \
3980 _argvec[3] = (unsigned long)( arg3 ); \
3981 _argvec[4] = (unsigned long)( arg4 ); \
3982 __asm__ volatile( "ld $4, 8(%1)\n\t" \
3983 "ld $5, 16(%1)\n\t" \
3984 "ld $6, 24(%1)\n\t" \
3985 "ld $7, 32(%1)\n\t" \
3986 "ld $25, 0(%1)\n\t" \
3987 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
3989 : "r"( &_argvec[0] ) \
3990 : "memory", __CALLER_SAVED_REGS ); \
3991 lval = (__typeof__( lval ))_res; \
3994 # define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
3996 volatile OrigFn _orig = ( orig ); \
3997 volatile unsigned long _argvec[6]; \
3998 volatile unsigned long _res; \
3999 _argvec[0] = (unsigned long)_orig.nraddr; \
4000 _argvec[1] = (unsigned long)( arg1 ); \
4001 _argvec[2] = (unsigned long)( arg2 ); \
4002 _argvec[3] = (unsigned long)( arg3 ); \
4003 _argvec[4] = (unsigned long)( arg4 ); \
4004 _argvec[5] = (unsigned long)( arg5 ); \
4005 __asm__ volatile( "ld $4, 8(%1)\n\t" \
4006 "ld $5, 16(%1)\n\t" \
4007 "ld $6, 24(%1)\n\t" \
4008 "ld $7, 32(%1)\n\t" \
4009 "ld $8, 40(%1)\n\t" \
4010 "ld $25, 0(%1)\n\t" \
4011 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
4013 : "r"( &_argvec[0] ) \
4014 : "memory", __CALLER_SAVED_REGS ); \
4015 lval = (__typeof__( lval ))_res; \
4018 # define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
4020 volatile OrigFn _orig = ( orig ); \
4021 volatile unsigned long _argvec[7]; \
4022 volatile unsigned long _res; \
4023 _argvec[0] = (unsigned long)_orig.nraddr; \
4024 _argvec[1] = (unsigned long)( arg1 ); \
4025 _argvec[2] = (unsigned long)( arg2 ); \
4026 _argvec[3] = (unsigned long)( arg3 ); \
4027 _argvec[4] = (unsigned long)( arg4 ); \
4028 _argvec[5] = (unsigned long)( arg5 ); \
4029 _argvec[6] = (unsigned long)( arg6 ); \
4030 __asm__ volatile( "ld $4, 8(%1)\n\t" \
4031 "ld $5, 16(%1)\n\t" \
4032 "ld $6, 24(%1)\n\t" \
4033 "ld $7, 32(%1)\n\t" \
4034 "ld $8, 40(%1)\n\t" \
4035 "ld $9, 48(%1)\n\t" \
4036 "ld $25, 0(%1)\n\t" \
4037 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
4039 : "r"( &_argvec[0] ) \
4040 : "memory", __CALLER_SAVED_REGS ); \
4041 lval = (__typeof__( lval ))_res; \
4044 # define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
4046 volatile OrigFn _orig = ( orig ); \
4047 volatile unsigned long _argvec[8]; \
4048 volatile unsigned long _res; \
4049 _argvec[0] = (unsigned long)_orig.nraddr; \
4050 _argvec[1] = (unsigned long)( arg1 ); \
4051 _argvec[2] = (unsigned long)( arg2 ); \
4052 _argvec[3] = (unsigned long)( arg3 ); \
4053 _argvec[4] = (unsigned long)( arg4 ); \
4054 _argvec[5] = (unsigned long)( arg5 ); \
4055 _argvec[6] = (unsigned long)( arg6 ); \
4056 _argvec[7] = (unsigned long)( arg7 ); \
4057 __asm__ volatile( "ld $4, 8(%1)\n\t" \
4058 "ld $5, 16(%1)\n\t" \
4059 "ld $6, 24(%1)\n\t" \
4060 "ld $7, 32(%1)\n\t" \
4061 "ld $8, 40(%1)\n\t" \
4062 "ld $9, 48(%1)\n\t" \
4063 "ld $10, 56(%1)\n\t" \
4064 "ld $25, 0(%1) \n\t" \
4065 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
4067 : "r"( &_argvec[0] ) \
4068 : "memory", __CALLER_SAVED_REGS ); \
4069 lval = (__typeof__( lval ))_res; \
4072 # define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
4074 volatile OrigFn _orig = ( orig ); \
4075 volatile unsigned long _argvec[9]; \
4076 volatile unsigned long _res; \
4077 _argvec[0] = (unsigned long)_orig.nraddr; \
4078 _argvec[1] = (unsigned long)( arg1 ); \
4079 _argvec[2] = (unsigned long)( arg2 ); \
4080 _argvec[3] = (unsigned long)( arg3 ); \
4081 _argvec[4] = (unsigned long)( arg4 ); \
4082 _argvec[5] = (unsigned long)( arg5 ); \
4083 _argvec[6] = (unsigned long)( arg6 ); \
4084 _argvec[7] = (unsigned long)( arg7 ); \
4085 _argvec[8] = (unsigned long)( arg8 ); \
4086 __asm__ volatile( "ld $4, 8(%1)\n\t" \
4087 "ld $5, 16(%1)\n\t" \
4088 "ld $6, 24(%1)\n\t" \
4089 "ld $7, 32(%1)\n\t" \
4090 "ld $8, 40(%1)\n\t" \
4091 "ld $9, 48(%1)\n\t" \
4092 "ld $10, 56(%1)\n\t" \
4093 "ld $11, 64(%1)\n\t" \
4094 "ld $25, 0(%1) \n\t" \
4095 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \
4097 : "r"( &_argvec[0] ) \
4098 : "memory", __CALLER_SAVED_REGS ); \
4099 lval = (__typeof__( lval ))_res; \
4102 # define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
4104 volatile OrigFn _orig = ( orig ); \
4105 volatile unsigned long _argvec[10]; \
4106 volatile unsigned long _res; \
4107 _argvec[0] = (unsigned long)_orig.nraddr; \
4108 _argvec[1] = (unsigned long)( arg1 ); \
4109 _argvec[2] = (unsigned long)( arg2 ); \
4110 _argvec[3] = (unsigned long)( arg3 ); \
4111 _argvec[4] = (unsigned long)( arg4 ); \
4112 _argvec[5] = (unsigned long)( arg5 ); \
4113 _argvec[6] = (unsigned long)( arg6 ); \
4114 _argvec[7] = (unsigned long)( arg7 ); \
4115 _argvec[8] = (unsigned long)( arg8 ); \
4116 _argvec[9] = (unsigned long)( arg9 ); \
4117 __asm__ volatile( "dsubu $29, $29, 8\n\t" \
4118 "ld $4, 72(%1)\n\t" \
4119 "sd $4, 0($29)\n\t" \
4120 "ld $4, 8(%1)\n\t" \
4121 "ld $5, 16(%1)\n\t" \
4122 "ld $6, 24(%1)\n\t" \
4123 "ld $7, 32(%1)\n\t" \
4124 "ld $8, 40(%1)\n\t" \
4125 "ld $9, 48(%1)\n\t" \
4126 "ld $10, 56(%1)\n\t" \
4127 "ld $11, 64(%1)\n\t" \
4128 "ld $25, 0(%1)\n\t" \
4129 VALGRIND_CALL_NOREDIR_T9 "daddu $29, $29, 8\n\t" \
4132 : "r"( &_argvec[0] ) \
4133 : "memory", __CALLER_SAVED_REGS ); \
4134 lval = (__typeof__( lval ))_res; \
4137 # define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
4139 volatile OrigFn _orig = ( orig ); \
4140 volatile unsigned long _argvec[11]; \
4141 volatile unsigned long _res; \
4142 _argvec[0] = (unsigned long)_orig.nraddr; \
4143 _argvec[1] = (unsigned long)( arg1 ); \
4144 _argvec[2] = (unsigned long)( arg2 ); \
4145 _argvec[3] = (unsigned long)( arg3 ); \
4146 _argvec[4] = (unsigned long)( arg4 ); \
4147 _argvec[5] = (unsigned long)( arg5 ); \
4148 _argvec[6] = (unsigned long)( arg6 ); \
4149 _argvec[7] = (unsigned long)( arg7 ); \
4150 _argvec[8] = (unsigned long)( arg8 ); \
4151 _argvec[9] = (unsigned long)( arg9 ); \
4152 _argvec[10] = (unsigned long)( arg10 ); \
4153 __asm__ volatile( "dsubu $29, $29, 16\n\t" \
4154 "ld $4, 72(%1)\n\t" \
4155 "sd $4, 0($29)\n\t" \
4156 "ld $4, 80(%1)\n\t" \
4157 "sd $4, 8($29)\n\t" \
4158 "ld $4, 8(%1)\n\t" \
4159 "ld $5, 16(%1)\n\t" \
4160 "ld $6, 24(%1)\n\t" \
4161 "ld $7, 32(%1)\n\t" \
4162 "ld $8, 40(%1)\n\t" \
4163 "ld $9, 48(%1)\n\t" \
4164 "ld $10, 56(%1)\n\t" \
4165 "ld $11, 64(%1)\n\t" \
4166 "ld $25, 0(%1)\n\t" \
4167 VALGRIND_CALL_NOREDIR_T9 "daddu $29, $29, 16\n\t" \
4170 : "r"( &_argvec[0] ) \
4171 : "memory", __CALLER_SAVED_REGS ); \
4172 lval = (__typeof__( lval ))_res; \
4175 # define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
4177 volatile OrigFn _orig = ( orig ); \
4178 volatile unsigned long _argvec[12]; \
4179 volatile unsigned long _res; \
4180 _argvec[0] = (unsigned long)_orig.nraddr; \
4181 _argvec[1] = (unsigned long)( arg1 ); \
4182 _argvec[2] = (unsigned long)( arg2 ); \
4183 _argvec[3] = (unsigned long)( arg3 ); \
4184 _argvec[4] = (unsigned long)( arg4 ); \
4185 _argvec[5] = (unsigned long)( arg5 ); \
4186 _argvec[6] = (unsigned long)( arg6 ); \
4187 _argvec[7] = (unsigned long)( arg7 ); \
4188 _argvec[8] = (unsigned long)( arg8 ); \
4189 _argvec[9] = (unsigned long)( arg9 ); \
4190 _argvec[10] = (unsigned long)( arg10 ); \
4191 _argvec[11] = (unsigned long)( arg11 ); \
4192 __asm__ volatile( "dsubu $29, $29, 24\n\t" \
4193 "ld $4, 72(%1)\n\t" \
4194 "sd $4, 0($29)\n\t" \
4195 "ld $4, 80(%1)\n\t" \
4196 "sd $4, 8($29)\n\t" \
4197 "ld $4, 88(%1)\n\t" \
4198 "sd $4, 16($29)\n\t" \
4199 "ld $4, 8(%1)\n\t" \
4200 "ld $5, 16(%1)\n\t" \
4201 "ld $6, 24(%1)\n\t" \
4202 "ld $7, 32(%1)\n\t" \
4203 "ld $8, 40(%1)\n\t" \
4204 "ld $9, 48(%1)\n\t" \
4205 "ld $10, 56(%1)\n\t" \
4206 "ld $11, 64(%1)\n\t" \
4207 "ld $25, 0(%1)\n\t" \
4208 VALGRIND_CALL_NOREDIR_T9 "daddu $29, $29, 24\n\t" \
4211 : "r"( &_argvec[0] ) \
4212 : "memory", __CALLER_SAVED_REGS ); \
4213 lval = (__typeof__( lval ))_res; \
4216 # define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
4218 volatile OrigFn _orig = ( orig ); \
4219 volatile unsigned long _argvec[13]; \
4220 volatile unsigned long _res; \
4221 _argvec[0] = (unsigned long)_orig.nraddr; \
4222 _argvec[1] = (unsigned long)( arg1 ); \
4223 _argvec[2] = (unsigned long)( arg2 ); \
4224 _argvec[3] = (unsigned long)( arg3 ); \
4225 _argvec[4] = (unsigned long)( arg4 ); \
4226 _argvec[5] = (unsigned long)( arg5 ); \
4227 _argvec[6] = (unsigned long)( arg6 ); \
4228 _argvec[7] = (unsigned long)( arg7 ); \
4229 _argvec[8] = (unsigned long)( arg8 ); \
4230 _argvec[9] = (unsigned long)( arg9 ); \
4231 _argvec[10] = (unsigned long)( arg10 ); \
4232 _argvec[11] = (unsigned long)( arg11 ); \
4233 _argvec[12] = (unsigned long)( arg12 ); \
4234 __asm__ volatile( "dsubu $29, $29, 32\n\t" \
4235 "ld $4, 72(%1)\n\t" \
4236 "sd $4, 0($29)\n\t" \
4237 "ld $4, 80(%1)\n\t" \
4238 "sd $4, 8($29)\n\t" \
4239 "ld $4, 88(%1)\n\t" \
4240 "sd $4, 16($29)\n\t" \
4241 "ld $4, 96(%1)\n\t" \
4242 "sd $4, 24($29)\n\t" \
4243 "ld $4, 8(%1)\n\t" \
4244 "ld $5, 16(%1)\n\t" \
4245 "ld $6, 24(%1)\n\t" \
4246 "ld $7, 32(%1)\n\t" \
4247 "ld $8, 40(%1)\n\t" \
4248 "ld $9, 48(%1)\n\t" \
4249 "ld $10, 56(%1)\n\t" \
4250 "ld $11, 64(%1)\n\t" \
4251 "ld $25, 0(%1)\n\t" \
4252 VALGRIND_CALL_NOREDIR_T9 "daddu $29, $29, 32\n\t" \
4255 : "r"( &_argvec[0] ) \
4256 : "memory", __CALLER_SAVED_REGS ); \
4257 lval = (__typeof__( lval ))_res; \
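The CALL_FN_W_* macros above are the low-level building blocks for function wrapping. A minimal sketch (not part of valgrind.h) of how they are typically combined with OrigFn, VALGRIND_GET_ORIG_FN and I_WRAP_SONAME_FNNAME_ZU, all defined earlier in this header; the wrapped function `foo` in the main executable (soname "NONE") is hypothetical:

#include <stdio.h>
#include "valgrind.h"

/* Wrapper for a hypothetical `int foo(int)` exported by the main executable. */
int I_WRAP_SONAME_FNNAME_ZU( NONE, foo )( int x ) {
  int    result;
  OrigFn fn;
  VALGRIND_GET_ORIG_FN( fn );      /* fetch the address of the real foo */
  printf( "before foo(%d)\n", x );
  CALL_FN_W_W( result, fn, x );    /* word result, one word argument */
  printf( "after foo, result = %d\n", result );
  return result;
}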
4277 #define VG_USERREQ_TOOL_BASE( a, b ) ( (unsigned int)( ( (a)&0xff ) << 24 | ( (b)&0xff ) << 16 ) )
4278 #define VG_IS_TOOL_USERREQ( a, b, v ) ( VG_USERREQ_TOOL_BASE( a, b ) == ( (v)&0xffff0000 ) )
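VG_USERREQ_TOOL_BASE packs a two-character tool tag into the top 16 bits of a request code, and VG_IS_TOOL_USERREQ tests whether a code carries that tag. A sketch with a made-up tag ('X','T'); the enum and its members are hypothetical:

/* Hypothetical tool-specific requests derived from the tag ('X','T'). */
typedef enum {
  XT_USERREQ__RESET_STATS = VG_USERREQ_TOOL_BASE( 'X', 'T' ),
  XT_USERREQ__DUMP_STATS
} XT_ClientRequest;

/* VG_IS_TOOL_USERREQ( 'X', 'T', req ) is then non-zero for both values. */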
4357 #if !defined( __GNUC__ )
4358 # define __extension__
4365 #define RUNNING_ON_VALGRIND \
4366 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__RUNNING_ON_VALGRIND, 0, 0, 0, 0, 0 )
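RUNNING_ON_VALGRIND evaluates to 0 when the program runs natively (and unconditionally to 0 when NVALGRIND is defined), so it can gate instrumentation-only behaviour. An illustrative sketch, assuming valgrind.h is included; the scaling factor is arbitrary:

static unsigned pick_iterations( unsigned wanted ) {
  /* Run a lighter workload under Valgrind's large slowdown. */
  return RUNNING_ON_VALGRIND ? wanted / 10 : wanted;
}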
4372 #define VALGRIND_DISCARD_TRANSLATIONS( _qzz_addr, _qzz_len ) \
4373 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__DISCARD_TRANSLATIONS, _qzz_addr, _qzz_len, 0, 0, 0 )
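VALGRIND_DISCARD_TRANSLATIONS matters mainly for JIT-style code: if instructions in a range are overwritten or the memory is reused, Valgrind must drop any translations it cached for that range. A sketch (the function and its buffer management are hypothetical):

static void jit_retire( void* code_buf, unsigned long code_len ) {
  VALGRIND_DISCARD_TRANSLATIONS( code_buf, code_len );
  /* ... the buffer may now be freed or refilled with new instructions ... */
}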
4380 #if defined( __GNUC__ ) || defined( __INTEL_COMPILER ) && !defined( _MSC_VER )
4383 static int VALGRIND_PRINTF( const char* format, ... ) __attribute__( ( format( __printf__, 1, 2 ), __unused__ ) );
4386 #if defined( _MSC_VER )
4389 VALGRIND_PRINTF( const char* format, ... ) {
4390 #if defined( NVALGRIND )
4393 # if defined( _MSC_VER ) || defined( __MINGW64__ )
4396 unsigned long _qzz_res;
4399 va_start( vargs, format );
4400 # if defined( _MSC_VER ) || defined( __MINGW64__ )
4405 (unsigned long)&vargs, 0, 0, 0 );
4408 return (int)_qzz_res;
4412 #if defined( __GNUC__ ) || defined( __INTEL_COMPILER ) && !defined( _MSC_VER )
4413 static int VALGRIND_PRINTF_BACKTRACE( const char* format, ... ) __attribute__( ( format( __printf__, 1, 2 ), __unused__ ) );
4417 #if defined( _MSC_VER )
4420 VALGRIND_PRINTF_BACKTRACE( const char* format, ... ) {
4421 #if defined( NVALGRIND )
4424 # if defined( _MSC_VER ) || defined( __MINGW64__ )
4427 unsigned long _qzz_res;
4430 va_start( vargs, format );
4431 # if defined( _MSC_VER ) || defined( __MINGW64__ )
4433 (uintptr_t)&vargs, 0, 0, 0 );
4436 (unsigned long)&vargs, 0, 0, 0 );
4439 return (int)_qzz_res;
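Both printf variants send their output to the Valgrind log and return the number of characters printed (0 when not running under Valgrind); the _BACKTRACE form additionally prints the calling thread's stack trace. A small sketch, assuming valgrind.h is included:

static void report_suspicious( int code ) {
  VALGRIND_PRINTF( "suspicious state, code=%d\n", code );
  VALGRIND_PRINTF_BACKTRACE( "reached from:\n" );
}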
4466 #define VALGRIND_NON_SIMD_CALL0( _qyy_fn ) \
4467 VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__CLIENT_CALL0, _qyy_fn, 0, 0, 0, 0 )
4469 #define VALGRIND_NON_SIMD_CALL1( _qyy_fn, _qyy_arg1 ) \
4470 VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__CLIENT_CALL1, _qyy_fn, _qyy_arg1, 0, 0, 0 )
4472 #define VALGRIND_NON_SIMD_CALL2( _qyy_fn, _qyy_arg1, _qyy_arg2 ) \
4473 VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__CLIENT_CALL2, _qyy_fn, _qyy_arg1, _qyy_arg2, 0, \
4476 #define VALGRIND_NON_SIMD_CALL3( _qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3 ) \
4477 VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__CLIENT_CALL3, _qyy_fn, _qyy_arg1, _qyy_arg2, \
4483 #define VALGRIND_COUNT_ERRORS \
4484 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__COUNT_ERRORS, 0, 0, 0, 0, 0 )
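The CLIENT_CALL requests run a client function on the real CPU rather than on the simulated one, passing the current ThreadId as an extra first argument; VALGRIND_COUNT_ERRORS asks the tool how many errors it has reported so far. A sketch (not from valgrind.h); `scan_table` and its argument are hypothetical:

static long scan_table( int tid, void* table ) { /* tid supplied by Valgrind */
  (void)tid; (void)table;
  return 0;
}

static int self_check( void* table ) {
  long found = (long)VALGRIND_NON_SIMD_CALL1( scan_table, table );
  /* Also fail if the tool (e.g. Memcheck) has already reported errors. */
  return found == 0 && VALGRIND_COUNT_ERRORS == 0;
}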
4587 #define VALGRIND_MALLOCLIKE_BLOCK( addr, sizeB, rzB, is_zeroed ) \
4588 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MALLOCLIKE_BLOCK, addr, sizeB, rzB, is_zeroed, 0 )
4593 #define VALGRIND_RESIZEINPLACE_BLOCK( addr, oldSizeB, newSizeB, rzB ) \
4594 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__RESIZEINPLACE_BLOCK, addr, oldSizeB, newSizeB, rzB, 0 )
4599 #define VALGRIND_FREELIKE_BLOCK( addr, rzB ) \
4600 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__FREELIKE_BLOCK, addr, rzB, 0, 0, 0 )
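These requests let a custom allocator describe its blocks to tools such as Memcheck: mark a block at allocation, optionally on in-place resize, and at release. A minimal arena sketch (fixed size, zero redzones, no bounds checks; all of that is illustrative, not part of valgrind.h):

static char   arena[4096];
static size_t arena_used;

static void* arena_alloc( size_t n ) {
  void* p = arena + arena_used;
  arena_used += n;                       /* sketch: no overflow check */
  VALGRIND_MALLOCLIKE_BLOCK( p, n, /*rzB*/ 0, /*is_zeroed*/ 0 );
  return p;
}

static void arena_release( void* p ) { VALGRIND_FREELIKE_BLOCK( p, /*rzB*/ 0 ); }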
4603 #define VALGRIND_CREATE_MEMPOOL( pool, rzB, is_zeroed ) \
4604 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__CREATE_MEMPOOL, pool, rzB, is_zeroed, 0, 0 )
4607 #define VALGRIND_DESTROY_MEMPOOL( pool ) \
4608 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__DESTROY_MEMPOOL, pool, 0, 0, 0, 0 )
4611 #define VALGRIND_MEMPOOL_ALLOC( pool, addr, size ) \
4612 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MEMPOOL_ALLOC, pool, addr, size, 0, 0 )
4615 #define VALGRIND_MEMPOOL_FREE( pool, addr ) \
4616 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MEMPOOL_FREE, pool, addr, 0, 0, 0 )
4619 #define VALGRIND_MEMPOOL_TRIM( pool, addr, size ) \
4620 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MEMPOOL_TRIM, pool, addr, size, 0, 0 )
4623 #define VALGRIND_MOVE_MEMPOOL( poolA, poolB ) \
4624 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MOVE_MEMPOOL, poolA, poolB, 0, 0, 0 )
4627 #define VALGRIND_MEMPOOL_CHANGE( pool, addrA, addrB, size ) \
4628 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MEMPOOL_CHANGE, pool, addrA, addrB, size, 0 )
4631 #define VALGRIND_MEMPOOL_EXISTS( pool ) \
4632 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__MEMPOOL_EXISTS, pool, 0, 0, 0, 0 )
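The mempool requests are the pool-level counterpart: a pool is announced once via an "anchor" address, each sub-allocation and release is reported against it, and destroying the pool implicitly releases everything in it. A sketch with malloc as the illustrative backing store:

#include <stdlib.h>
#include "valgrind.h"

static char*  pool_base; /* anchor address identifying the pool */
static size_t pool_used;

static void pool_create( size_t bytes ) {
  pool_base = malloc( bytes );
  pool_used = 0;
  VALGRIND_CREATE_MEMPOOL( pool_base, /*rzB*/ 0, /*is_zeroed*/ 0 );
}

static void* pool_alloc( size_t n ) {
  void* p = pool_base + pool_used;
  pool_used += n;                        /* sketch: no overflow check */
  VALGRIND_MEMPOOL_ALLOC( pool_base, p, n );
  return p;
}

static void pool_release( void* p ) { VALGRIND_MEMPOOL_FREE( pool_base, p ); }

static void pool_destroy( void ) {
  VALGRIND_DESTROY_MEMPOOL( pool_base );
  free( pool_base );
}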
4635 #define VALGRIND_STACK_REGISTER( start, end ) \
4636 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__STACK_REGISTER, start, end, 0, 0, 0 )
4640 #define VALGRIND_STACK_DEREGISTER( id ) VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__STACK_DEREGISTER, id, 0, 0, 0, 0 )
4643 #define VALGRIND_STACK_CHANGE( id, start, end ) \
4644 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__STACK_CHANGE, id, start, end, 0, 0 )
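Stacks that the program manages itself (coroutines, fibers, alternate signal stacks) should be registered so Valgrind does not misinterpret the stack switch; the returned id is later used to deregister or move the stack. A sketch with an arbitrary 64 KiB stack:

#include <stdlib.h>
#include "valgrind.h"

static char*    fib_stack;
static unsigned fib_stack_id;

static void fiber_stack_setup( void ) {
  fib_stack    = malloc( 64 * 1024 );
  fib_stack_id = VALGRIND_STACK_REGISTER( fib_stack, fib_stack + 64 * 1024 );
}

static void fiber_stack_teardown( void ) {
  VALGRIND_STACK_DEREGISTER( fib_stack_id );
  free( fib_stack );
}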
4647 #define VALGRIND_LOAD_PDB_DEBUGINFO( fd, ptr, total_size, delta ) \
4648 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__LOAD_PDB_DEBUGINFO, fd, ptr, total_size, delta, 0 )
4654 #define VALGRIND_MAP_IP_TO_SRCLOC( addr, buf64 ) \
4655 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__MAP_IP_TO_SRCLOC, addr, buf64, 0, 0, 0 )
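VALGRIND_MAP_IP_TO_SRCLOC fills a caller-supplied 64-byte buffer with the source location of a code address, which is handy for lightweight self-tracing. A sketch using the GCC builtin for the caller's return address (an assumption appropriate for the __GNUC__ paths in this header):

static void log_call_site( void ) {
  char where[64];
  VALGRIND_MAP_IP_TO_SRCLOC( __builtin_return_address( 0 ), where );
  VALGRIND_PRINTF( "called from %s\n", where );
}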
4665 #define VALGRIND_DISABLE_ERROR_REPORTING \
4666 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__CHANGE_ERR_DISABLEMENT, 1, 0, 0, 0, 0 )
4670 #define VALGRIND_ENABLE_ERROR_REPORTING \
4671 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__CHANGE_ERR_DISABLEMENT, -1, 0, 0, 0, 0 )
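Error reporting can be switched off around code that is known to trigger harmless reports; the requests nest per thread, so every DISABLE needs a matching ENABLE. A sketch around a hypothetical third-party call:

extern void noisy_third_party_init( void ); /* hypothetical */

static void init_with_reports_suppressed( void ) {
  VALGRIND_DISABLE_ERROR_REPORTING;
  noisy_third_party_init();
  VALGRIND_ENABLE_ERROR_REPORTING;
}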
4678 #define VALGRIND_MONITOR_COMMAND( command ) \
4679 VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__GDB_MONITOR_COMMAND, command, 0, 0, 0, 0 )
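VALGRIND_MONITOR_COMMAND lets the client issue a gdbserver monitor command on itself; with Memcheck, for example, a leak summary can be requested mid-run. A minimal sketch:

static void leak_checkpoint( void ) {
  VALGRIND_MONITOR_COMMAND( "leak_check summary" );
}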
4681 #undef PLAT_x86_darwin
4682 #undef PLAT_amd64_darwin
4683 #undef PLAT_x86_win32
4684 #undef PLAT_amd64_win64
4685 #undef PLAT_x86_linux
4686 #undef PLAT_amd64_linux
4687 #undef PLAT_ppc32_linux
4688 #undef PLAT_ppc64_linux
4689 #undef PLAT_arm_linux
4690 #undef PLAT_s390x_linux
4691 #undef PLAT_mips32_linux
4692 #undef PLAT_mips64_linux