/* ======================================================================
   Valgrind client-request interface (valgrind.h).  Reconstructed from a
   garbled extraction in which the original line numbers had been fused
   into the text; content follows the Valgrind 3.8 interface the version
   stamp below declares.
   ====================================================================== */
#define __VALGRIND_MAJOR__ 3
#define __VALGRIND_MINOR__ 8

/* ------------------------- platform detection -------------------------
   Exactly one PLAT_* macro ends up defined; on an unrecognised platform
   NVALGRIND is forced so every client request compiles away to nothing. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64_linux
#undef PLAT_arm_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux

#if defined( __APPLE__ ) && defined( __i386__ )
#define PLAT_x86_darwin 1
#elif defined( __APPLE__ ) && defined( __x86_64__ )
#define PLAT_amd64_darwin 1
#elif defined( __MINGW32__ ) || defined( __CYGWIN32__ ) || ( defined( _WIN32 ) && defined( _M_IX86 ) )
#define PLAT_x86_win32 1
#elif defined( __MINGW64__ ) || ( defined( _WIN64 ) && defined( _M_X64 ) )
#define PLAT_amd64_win64 1
#elif defined( __linux__ ) && defined( __i386__ )
#define PLAT_x86_linux 1
#elif defined( __linux__ ) && defined( __x86_64__ )
#define PLAT_amd64_linux 1
#elif defined( __linux__ ) && defined( __powerpc__ ) && !defined( __powerpc64__ )
#define PLAT_ppc32_linux 1
#elif defined( __linux__ ) && defined( __powerpc__ ) && defined( __powerpc64__ )
#define PLAT_ppc64_linux 1
#elif defined( __linux__ ) && defined( __arm__ )
#define PLAT_arm_linux 1
#elif defined( __linux__ ) && defined( __s390__ ) && defined( __s390x__ )
#define PLAT_s390x_linux 1
#elif defined( __linux__ ) && defined( __mips__ )
/* NOTE(review): the 64/32 split below was hidden in the garbled source;
   __mips == 64 is the conventional discriminator — confirm against the
   project's original header. */
#if ( __mips == 64 )
#define PLAT_mips64_linux 1
#else
#define PLAT_mips32_linux 1
#endif
#else
/* Unsupported platform: generate no inline asm at all. */
#if !defined( NVALGRIND )
#define NVALGRIND 1
#endif
#endif

#if !defined( NVALGRIND )
/* Older, statement-style entry point: runs the client request and assigns
   the result to _zzq_rlval.  Prefer VALGRIND_DO_CLIENT_REQUEST_EXPR. */
#define VALGRIND_DO_CLIENT_REQUEST( _zzq_rlval, _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, \
                                    _zzq_arg4, _zzq_arg5 ) \
   { \
      ( _zzq_rlval ) = VALGRIND_DO_CLIENT_REQUEST_EXPR( ( _zzq_default ), ( _zzq_request ), ( _zzq_arg1 ), \
                                                        ( _zzq_arg2 ), ( _zzq_arg3 ), ( _zzq_arg4 ), \
                                                        ( _zzq_arg5 ) ); \
   }

/* Statement form: run the request and discard its result. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT( _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5 ) \
   { \
      (void)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, ( _zzq_request ), ( _zzq_arg1 ), ( _zzq_arg2 ), ( _zzq_arg3 ), \
                                             ( _zzq_arg4 ), ( _zzq_arg5 ) ); \
   }
#endif /* !NVALGRIND */

#if defined( NVALGRIND )
/* Requests compiled out entirely: the expression yields the default. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                         _zzq_arg5 ) \
   ( _zzq_default )
#endif /* NVALGRIND */

/* ------------------- x86-{linux,darwin,win32(GCC)} ------------------- */
#if !defined( NVALGRIND ) && \
    ( defined( PLAT_x86_linux ) || defined( PLAT_x86_darwin ) || ( defined( PLAT_x86_win32 ) && defined( __GNUC__ ) ) )

typedef struct {
   unsigned int nraddr; /* where's the code? */
} OrigFn;

/* Magic no-op sequence (rotate %edi by 3+13+29+19 = 64 bits, i.e. a
   no-op) that Valgrind's JIT recognises as introducing a request. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
   "roll $3, %%edi ; roll $13, %%edi\n\t" \
   "roll $29, %%edi ; roll $19, %%edi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                         _zzq_arg5 ) \
   __extension__( { \
      volatile unsigned int _zzq_args[6]; \
      volatile unsigned int _zzq_result; \
      _zzq_args[0] = (unsigned int)( _zzq_request ); \
      _zzq_args[1] = (unsigned int)( _zzq_arg1 ); \
      _zzq_args[2] = (unsigned int)( _zzq_arg2 ); \
      _zzq_args[3] = (unsigned int)( _zzq_arg3 ); \
      _zzq_args[4] = (unsigned int)( _zzq_arg4 ); \
      _zzq_args[5] = (unsigned int)( _zzq_arg5 ); \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* %EDX = client_request ( %EAX ) */ \
                        "xchgl %%ebx,%%ebx" \
                        : "=d"( _zzq_result ) \
                        : "a"( &_zzq_args[0] ), "0"( _zzq_default ) \
                        : "cc", "memory" ); \
      _zzq_result; \
   } )

#define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
   { \
      volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
      volatile unsigned int __addr; \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* %EAX = guest_NRADDR */ \
                        "xchgl %%ecx,%%ecx" \
                        : "=a"( __addr ) \
                        : \
                        : "cc", "memory" ); \
      _zzq_orig->nraddr = __addr; \
   }

/* Fragment spliced into CALL_FN_* asm: call *%EAX without redirection. */
#define VALGRIND_CALL_NOREDIR_EAX \
   __SPECIAL_INSTRUCTION_PREAMBLE /* call-noredir *%EAX */ \
   "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR() \
   do { \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "xchgl %%edi,%%edi\n\t" : : : "cc", "memory" ); \
   } while ( 0 )

#endif /* x86 with GCC-style inline asm */

/* ----------------------- x86-win32 (MSVC asm) ----------------------- */
#if !defined( NVALGRIND ) && defined( PLAT_x86_win32 ) && !defined( __GNUC__ )

typedef struct {
   unsigned int nraddr; /* where's the code? */
} OrigFn;

#if defined( _MSC_VER )

#define __SPECIAL_INSTRUCTION_PREAMBLE __asm rol edi, 3 __asm rol edi, 13 __asm rol edi, 29 __asm rol edi, 19

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                         _zzq_arg5 ) \
   valgrind_do_client_request_expr( ( uintptr_t )( _zzq_default ), ( uintptr_t )( _zzq_request ), \
                                    ( uintptr_t )( _zzq_arg1 ), ( uintptr_t )( _zzq_arg2 ), \
                                    ( uintptr_t )( _zzq_arg3 ), ( uintptr_t )( _zzq_arg4 ), \
                                    ( uintptr_t )( _zzq_arg5 ) )

/* MSVC cannot express this as a statement-expression macro, so the asm
   lives in a real (inlinable) helper function instead. */
static __inline uintptr_t valgrind_do_client_request_expr( uintptr_t _zzq_default, uintptr_t _zzq_request,
                                                           uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                                           uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                                           uintptr_t _zzq_arg5 )
{
   volatile uintptr_t _zzq_args[6];
   volatile unsigned int _zzq_result;
   _zzq_args[0] = ( uintptr_t )( _zzq_request );
   _zzq_args[1] = ( uintptr_t )( _zzq_arg1 );
   _zzq_args[2] = ( uintptr_t )( _zzq_arg2 );
   _zzq_args[3] = ( uintptr_t )( _zzq_arg3 );
   _zzq_args[4] = ( uintptr_t )( _zzq_arg4 );
   _zzq_args[5] = ( uintptr_t )( _zzq_arg5 );
   /* EAX = &args, EDX = default; under Valgrind EDX comes back holding
      the request's result. */
   __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
           __SPECIAL_INSTRUCTION_PREAMBLE
           /* %EDX = client_request ( %EAX ) */
           __asm xchg ebx,ebx
           __asm mov _zzq_result, edx
   }
   return _zzq_result;
}

#define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
   { \
      volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
      volatile unsigned int __addr; \
      __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
              /* %EAX = guest_NRADDR */ \
              __asm xchg ecx,ecx \
              __asm mov __addr, eax \
      } \
      _zzq_orig->nraddr = __addr; \
   }

/* No-redirect calls are not expressible with MSVC inline asm; any use
   expands to the undefined identifier ERROR and fails to compile. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR() \
   do { \
      __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
              __asm xchg edi,edi \
      } \
   } while ( 0 )

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 && !__GNUC__ */
/* ---------------------- amd64-{linux,darwin} ---------------------- */
#if !defined( NVALGRIND ) && ( defined( PLAT_amd64_linux ) || defined( PLAT_amd64_darwin ) )

typedef struct {
   unsigned long long int nraddr; /* where's the code? */
} OrigFn;

/* Magic no-op: rotate %rdi by 3+13+61+51 = 128 bits. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
   "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
   "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                         _zzq_arg5 ) \
   __extension__( { \
      volatile unsigned long long int _zzq_args[6]; \
      volatile unsigned long long int _zzq_result; \
      _zzq_args[0] = (unsigned long long int)( _zzq_request ); \
      _zzq_args[1] = (unsigned long long int)( _zzq_arg1 ); \
      _zzq_args[2] = (unsigned long long int)( _zzq_arg2 ); \
      _zzq_args[3] = (unsigned long long int)( _zzq_arg3 ); \
      _zzq_args[4] = (unsigned long long int)( _zzq_arg4 ); \
      _zzq_args[5] = (unsigned long long int)( _zzq_arg5 ); \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* %RDX = client_request ( %RAX ) */ \
                        "xchgq %%rbx,%%rbx" \
                        : "=d"( _zzq_result ) \
                        : "a"( &_zzq_args[0] ), "0"( _zzq_default ) \
                        : "cc", "memory" ); \
      _zzq_result; \
   } )

#define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
   { \
      volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
      volatile unsigned long long int __addr; \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* %RAX = guest_NRADDR */ \
                        "xchgq %%rcx,%%rcx" \
                        : "=a"( __addr ) \
                        : \
                        : "cc", "memory" ); \
      _zzq_orig->nraddr = __addr; \
   }

/* Fragment spliced into CALL_FN_* asm: call *%RAX without redirection. */
#define VALGRIND_CALL_NOREDIR_RAX \
   __SPECIAL_INSTRUCTION_PREAMBLE /* call-noredir *%RAX */ \
   "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR() \
   do { \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "xchgq %%rdi,%%rdi\n\t" : : : "cc", "memory" ); \
   } while ( 0 )

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin */

/* --------------------------- ppc32-linux --------------------------- */
#if !defined( NVALGRIND ) && defined( PLAT_ppc32_linux )

typedef struct {
   unsigned int nraddr; /* where's the code? */
} OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE \
   "rlwinm 0,0,3,0,0 ; rlwinm 0,0,13,0,0\n\t" \
   "rlwinm 0,0,29,0,0 ; rlwinm 0,0,19,0,0\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                         _zzq_arg5 ) \
   __extension__( { \
      unsigned int _zzq_args[6]; \
      unsigned int _zzq_result; \
      unsigned int* _zzq_ptr; \
      _zzq_args[0] = (unsigned int)( _zzq_request ); \
      _zzq_args[1] = (unsigned int)( _zzq_arg1 ); \
      _zzq_args[2] = (unsigned int)( _zzq_arg2 ); \
      _zzq_args[3] = (unsigned int)( _zzq_arg3 ); \
      _zzq_args[4] = (unsigned int)( _zzq_arg4 ); \
      _zzq_args[5] = (unsigned int)( _zzq_arg5 ); \
      _zzq_ptr = _zzq_args; \
      __asm__ volatile( "mr 3,%1\n\t" /* default */ \
                        "mr 4,%2\n\t" /* ptr */ \
                        __SPECIAL_INSTRUCTION_PREAMBLE /* %R3 = client_request ( %R4 ) */ \
                        "or 1,1,1\n\t" \
                        "mr %0,3" /* result */ \
                        : "=b"( _zzq_result ) \
                        : "b"( _zzq_default ), "b"( _zzq_ptr ) \
                        : "cc", "memory", "r3", "r4" ); \
      _zzq_result; \
   } )

#define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
   { \
      volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
      unsigned int __addr; \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* %R3 = guest_NRADDR */ \
                        "or 2,2,2\n\t" \
                        "mr %0,3" \
                        : "=b"( __addr ) \
                        : \
                        : "cc", "memory", "r3" ); \
      _zzq_orig->nraddr = __addr; \
   }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
   __SPECIAL_INSTRUCTION_PREAMBLE /* branch-and-link-to-noredir *%R11 */ \
   "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR() \
   do { \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "or 5,5,5\n\t" ); \
   } while ( 0 )

#endif /* PLAT_ppc32_linux */

/* --------------------------- ppc64-linux --------------------------- */
#if !defined( NVALGRIND ) && defined( PLAT_ppc64_linux )

typedef struct {
   unsigned long long int nraddr; /* where's the code? */
   unsigned long long int r2;     /* what tocptr do we need? */
} OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE \
   "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
   "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                         _zzq_arg5 ) \
   __extension__( { \
      unsigned long long int _zzq_args[6]; \
      unsigned long long int _zzq_result; \
      unsigned long long int* _zzq_ptr; \
      _zzq_args[0] = (unsigned long long int)( _zzq_request ); \
      _zzq_args[1] = (unsigned long long int)( _zzq_arg1 ); \
      _zzq_args[2] = (unsigned long long int)( _zzq_arg2 ); \
      _zzq_args[3] = (unsigned long long int)( _zzq_arg3 ); \
      _zzq_args[4] = (unsigned long long int)( _zzq_arg4 ); \
      _zzq_args[5] = (unsigned long long int)( _zzq_arg5 ); \
      _zzq_ptr = _zzq_args; \
      __asm__ volatile( "mr 3,%1\n\t" /* default */ \
                        "mr 4,%2\n\t" /* ptr */ \
                        __SPECIAL_INSTRUCTION_PREAMBLE /* %R3 = client_request ( %R4 ) */ \
                        "or 1,1,1\n\t" \
                        "mr %0,3" /* result */ \
                        : "=b"( _zzq_result ) \
                        : "b"( _zzq_default ), "b"( _zzq_ptr ) \
                        : "cc", "memory", "r3", "r4" ); \
      _zzq_result; \
   } )

/* Two requests: fetch the wrapped function's address, then the TOC
   pointer (r2) it must run with. */
#define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
   { \
      volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
      unsigned long long int __addr; \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* %R3 = guest_NRADDR */ \
                        "or 2,2,2\n\t" \
                        "mr %0,3" \
                        : "=b"( __addr ) \
                        : \
                        : "cc", "memory", "r3" ); \
      _zzq_orig->nraddr = __addr; \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* %R3 = guest_NRADDR_GPR2 */ \
                        "or 4,4,4\n\t" \
                        "mr %0,3" \
                        : "=b"( __addr ) \
                        : \
                        : "cc", "memory", "r3" ); \
      _zzq_orig->r2 = __addr; \
   }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
   __SPECIAL_INSTRUCTION_PREAMBLE /* branch-and-link-to-noredir *%R11 */ \
   "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR() \
   do { \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "or 5,5,5\n\t" ); \
   } while ( 0 )

#endif /* PLAT_ppc64_linux */

/* ---------------------------- arm-linux ---------------------------- */
#if !defined( NVALGRIND ) && defined( PLAT_arm_linux )

typedef struct {
   unsigned int nraddr; /* where's the code? */
} OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE \
   "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
   "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                         _zzq_arg5 ) \
   __extension__( { \
      volatile unsigned int _zzq_args[6]; \
      volatile unsigned int _zzq_result; \
      _zzq_args[0] = (unsigned int)( _zzq_request ); \
      _zzq_args[1] = (unsigned int)( _zzq_arg1 ); \
      _zzq_args[2] = (unsigned int)( _zzq_arg2 ); \
      _zzq_args[3] = (unsigned int)( _zzq_arg3 ); \
      _zzq_args[4] = (unsigned int)( _zzq_arg4 ); \
      _zzq_args[5] = (unsigned int)( _zzq_arg5 ); \
      __asm__ volatile( "mov r3, %1\n\t" /* default */ \
                        "mov r4, %2\n\t" /* ptr */ \
                        __SPECIAL_INSTRUCTION_PREAMBLE /* R3 = client_request ( R4 ) */ \
                        "orr r10, r10, r10\n\t" \
                        "mov %0, r3" /* result */ \
                        : "=r"( _zzq_result ) \
                        : "r"( _zzq_default ), "r"( &_zzq_args[0] ) \
                        : "cc", "memory", "r3", "r4" ); \
      _zzq_result; \
   } )

#define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
   { \
      volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
      unsigned int __addr; \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* R3 = guest_NRADDR */ \
                        "orr r11, r11, r11\n\t" \
                        "mov %0, r3" \
                        : "=r"( __addr ) \
                        : \
                        : "cc", "memory", "r3" ); \
      _zzq_orig->nraddr = __addr; \
   }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
   __SPECIAL_INSTRUCTION_PREAMBLE /* branch-and-link-to-noredir *%R4 */ \
   "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR() \
   do { \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "orr r9, r9, r9\n\t" : : : "cc", "memory" ); \
   } while ( 0 )

#endif /* PLAT_arm_linux */
/* --------------------------- s390x-linux --------------------------- */
#if !defined( NVALGRIND ) && defined( PLAT_s390x_linux )

typedef struct {
   unsigned long long int nraddr; /* where's the code? */
} OrigFn;

/* Preamble is a no-op register-to-itself move sequence; the instruction
   that follows it selects the request kind. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
   "lr 15,15\n\t" \
   "lr 1,1\n\t" \
   "lr 2,2\n\t" \
   "lr 3,3\n\t"

#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE "lr 5,5\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                         _zzq_arg5 ) \
   __extension__( { \
      volatile unsigned long long int _zzq_args[6]; \
      volatile unsigned long long int _zzq_result; \
      _zzq_args[0] = (unsigned long long int)( _zzq_request ); \
      _zzq_args[1] = (unsigned long long int)( _zzq_arg1 ); \
      _zzq_args[2] = (unsigned long long int)( _zzq_arg2 ); \
      _zzq_args[3] = (unsigned long long int)( _zzq_arg3 ); \
      _zzq_args[4] = (unsigned long long int)( _zzq_arg4 ); \
      _zzq_args[5] = (unsigned long long int)( _zzq_arg5 ); \
      __asm__ volatile( /* r2 = args, r3 = default */ \
                        "lgr 2,%1\n\t" \
                        "lgr 3,%2\n\t" __SPECIAL_INSTRUCTION_PREAMBLE __CLIENT_REQUEST_CODE \
                        "lgr %0, 3\n\t" /* result */ \
                        : "=d"( _zzq_result ) \
                        : "a"( &_zzq_args[0] ), "0"( _zzq_default ) \
                        : "cc", "2", "3", "memory" ); \
      _zzq_result; \
   } )

#define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
   { \
      volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
      volatile unsigned long long int __addr; \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE __GET_NR_CONTEXT_CODE "lgr %0, 3\n\t" \
                        : "=a"( __addr ) \
                        : \
                        : "cc", "3", "memory" ); \
      _zzq_orig->nraddr = __addr; \
   }

#define VALGRIND_CALL_NOREDIR_R1 \
   __SPECIAL_INSTRUCTION_PREAMBLE \
   __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR() \
   do { \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE __VEX_INJECT_IR_CODE ); \
   } while ( 0 )

#endif /* PLAT_s390x_linux */

/* -------------------------- mips32-linux --------------------------- */
#if !defined( NVALGRIND ) && defined( PLAT_mips32_linux )

typedef struct {
   unsigned int nraddr; /* where's the code? */
} OrigFn;

/* Shifts of $zero are no-ops; this particular sequence is the magic
   marker the MIPS port recognises. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
   "srl $0, $0, 13\n\t" \
   "srl $0, $0, 29\n\t" \
   "srl $0, $0, 3\n\t" \
   "srl $0, $0, 19\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                         _zzq_arg5 ) \
   __extension__( { \
      volatile unsigned int _zzq_args[6]; \
      volatile unsigned int _zzq_result; \
      _zzq_args[0] = (unsigned int)( _zzq_request ); \
      _zzq_args[1] = (unsigned int)( _zzq_arg1 ); \
      _zzq_args[2] = (unsigned int)( _zzq_arg2 ); \
      _zzq_args[3] = (unsigned int)( _zzq_arg3 ); \
      _zzq_args[4] = (unsigned int)( _zzq_arg4 ); \
      _zzq_args[5] = (unsigned int)( _zzq_arg5 ); \
      __asm__ volatile( "move $11, %1\n\t" /* default */ \
                        "move $12, %2\n\t" /* ptr */ \
                        __SPECIAL_INSTRUCTION_PREAMBLE /* T3 = client_request ( T4 ) */ \
                        "or $13, $13, $13\n\t" \
                        "move %0, $11\n\t" /* result */ \
                        : "=r"( _zzq_result ) \
                        : "r"( _zzq_default ), "r"( &_zzq_args[0] ) \
                        : "$11", "$12" ); \
      _zzq_result; \
   } )

#define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
   { \
      volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
      volatile unsigned int __addr; \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* %t9 = guest_NRADDR */ \
                        "or $14, $14, $14\n\t" \
                        "move %0, $11" /* result */ \
                        : "=r"( __addr ) \
                        : \
                        : "$11" ); \
      _zzq_orig->nraddr = __addr; \
   }

#define VALGRIND_CALL_NOREDIR_T9 \
   __SPECIAL_INSTRUCTION_PREAMBLE /* call-noredir *%t9 */ \
   "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR() \
   do { \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "or $11, $11, $11\n\t" ); \
   } while ( 0 )

#endif /* PLAT_mips32_linux */
/* -------------------------- mips64-linux --------------------------- */
#if !defined( NVALGRIND ) && defined( PLAT_mips64_linux )

typedef struct {
   unsigned long long nraddr; /* where's the code? */
} OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE \
   "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
   "dsll $0,$0,29 ; dsll $0,$0,19\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
                                         _zzq_arg5 ) \
   __extension__( { \
      volatile unsigned long long int _zzq_args[6]; \
      volatile unsigned long long int _zzq_result; \
      _zzq_args[0] = (unsigned long long int)( _zzq_request ); \
      _zzq_args[1] = (unsigned long long int)( _zzq_arg1 ); \
      _zzq_args[2] = (unsigned long long int)( _zzq_arg2 ); \
      _zzq_args[3] = (unsigned long long int)( _zzq_arg3 ); \
      _zzq_args[4] = (unsigned long long int)( _zzq_arg4 ); \
      _zzq_args[5] = (unsigned long long int)( _zzq_arg5 ); \
      __asm__ volatile( "move $11, %1\n\t" /* default */ \
                        "move $12, %2\n\t" /* ptr */ \
                        __SPECIAL_INSTRUCTION_PREAMBLE /* $11 = client_request ( $12 ) */ \
                        "or $13, $13, $13\n\t" \
                        "move %0, $11\n\t" /* result */ \
                        : "=r"( _zzq_result ) \
                        : "r"( _zzq_default ), "r"( &_zzq_args[0] ) \
                        : "$11", "$12" ); \
      _zzq_result; \
   } )

#define VALGRIND_GET_NR_CONTEXT( _zzq_rlval ) \
   { \
      volatile OrigFn* _zzq_orig = &( _zzq_rlval ); \
      volatile unsigned long long int __addr; \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE /* $11 = guest_NRADDR */ \
                        "or $14, $14, $14\n\t" \
                        "move %0, $11" /* result */ \
                        : "=r"( __addr ) \
                        : \
                        : "$11" ); \
      _zzq_orig->nraddr = __addr; \
   }

#define VALGRIND_CALL_NOREDIR_T9 \
   __SPECIAL_INSTRUCTION_PREAMBLE /* call-noredir $25 */ \
   "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR() \
   do { \
      __asm__ volatile( __SPECIAL_INSTRUCTION_PREAMBLE "or $11, $11, $11\n\t" ); \
   } while ( 0 )

#endif /* PLAT_mips64_linux */

/* ============== function wrapping / replacement machinery ==============
   Symbol names of the form _vgw.../_vgr... are recognised by Valgrind's
   core and bound to the function they name in the matching soname. */

/* Paste four tokens together. */
#define VG_CONCAT4( _aa, _bb, _cc, _dd ) _aa##_bb##_cc##_dd

/* Wrapper names: ZU = soname Z-unencoded, ZZ = both parts Z-encoded. */
#define I_WRAP_SONAME_FNNAME_ZU( soname, fnname ) VG_CONCAT4( _vgw00000ZU_, soname, _, fnname )
#define I_WRAP_SONAME_FNNAME_ZZ( soname, fnname ) VG_CONCAT4( _vgw00000ZZ_, soname, _, fnname )

/* Inside a wrapper, fetch the OrigFn describing the wrapped function. */
#define VALGRIND_GET_ORIG_FN( _lval ) VALGRIND_GET_NR_CONTEXT( _lval )

/* Replacement (rather than wrapper) function names. */
#define I_REPLACE_SONAME_FNNAME_ZU( soname, fnname ) VG_CONCAT4( _vgr00000ZU_, soname, _, fnname )
#define I_REPLACE_SONAME_FNNAME_ZZ( soname, fnname ) VG_CONCAT4( _vgr00000ZZ_, soname, _, fnname )

/* Result-discarding variants: call via CALL_FN_W_* and drop the word. */
#define CALL_FN_v_v( fnptr ) \
   do { \
      volatile unsigned long _junk; \
      CALL_FN_W_v( _junk, fnptr ); \
   } while ( 0 )

#define CALL_FN_v_W( fnptr, arg1 ) \
   do { \
      volatile unsigned long _junk; \
      CALL_FN_W_W( _junk, fnptr, arg1 ); \
   } while ( 0 )

#define CALL_FN_v_WW( fnptr, arg1, arg2 ) \
   do { \
      volatile unsigned long _junk; \
      CALL_FN_W_WW( _junk, fnptr, arg1, arg2 ); \
   } while ( 0 )

#define CALL_FN_v_WWW( fnptr, arg1, arg2, arg3 ) \
   do { \
      volatile unsigned long _junk; \
      CALL_FN_W_WWW( _junk, fnptr, arg1, arg2, arg3 ); \
   } while ( 0 )

#define CALL_FN_v_WWWW( fnptr, arg1, arg2, arg3, arg4 ) \
   do { \
      volatile unsigned long _junk; \
      CALL_FN_W_WWWW( _junk, fnptr, arg1, arg2, arg3, arg4 ); \
   } while ( 0 )

#define CALL_FN_v_5W( fnptr, arg1, arg2, arg3, arg4, arg5 ) \
   do { \
      volatile unsigned long _junk; \
      CALL_FN_W_5W( _junk, fnptr, arg1, arg2, arg3, arg4, arg5 ); \
   } while ( 0 )

#define CALL_FN_v_6W( fnptr, arg1, arg2, arg3, arg4, arg5, arg6 ) \
   do { \
      volatile unsigned long _junk; \
      CALL_FN_W_6W( _junk, fnptr, arg1, arg2, arg3, arg4, arg5, arg6 ); \
   } while ( 0 )

#define CALL_FN_v_7W( fnptr, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
   do { \
      volatile unsigned long _junk; \
      CALL_FN_W_7W( _junk, fnptr, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ); \
   } while ( 0 )

/* ------------------ x86-{linux,darwin} call wrappers ------------------ */
#if defined( PLAT_x86_linux ) || defined( PLAT_x86_darwin )

/* Registers trashed by the hidden call (no need to mention eax). */
#define __CALLER_SAVED_REGS "ecx", "edx"

/* Re-align ESP to 16 for the hidden call (required on Darwin, harmless
   elsewhere); the original ESP is parked in %edi and restored after.
   The "subl $N, %%esp" in the wrappers keeps the argument pushes ending
   on a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK \
   "movl %%esp,%%edi\n\t" \
   "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK "movl %%edi,%%esp\n\t"

#define CALL_FN_W_v( lval, orig ) \
   do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( VALGRIND_ALIGN_STACK "movl (%%eax), %%eax\n\t" /* target->%eax */ \
                        VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                        : "=a"( _res ) \
                        : "a"( &_argvec[0] ) \
                        : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
      lval = (__typeof__( lval ))_res; \
   } while ( 0 )

#define CALL_FN_W_W( lval, orig, arg1 ) \
   do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      __asm__ volatile( VALGRIND_ALIGN_STACK "subl $12, %%esp\n\t" \
                        "pushl 4(%%eax)\n\t" \
                        "movl (%%eax), %%eax\n\t" \
                        VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                        : "=a"( _res ) \
                        : "a"( &_argvec[0] ) \
                        : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
      lval = (__typeof__( lval ))_res; \
   } while ( 0 )

#define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \
   do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      __asm__ volatile( VALGRIND_ALIGN_STACK "subl $8, %%esp\n\t" \
                        "pushl 8(%%eax)\n\t" "pushl 4(%%eax)\n\t" \
                        "movl (%%eax), %%eax\n\t" \
                        VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                        : "=a"( _res ) \
                        : "a"( &_argvec[0] ) \
                        : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
      lval = (__typeof__( lval ))_res; \
   } while ( 0 )

#define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \
   do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      __asm__ volatile( VALGRIND_ALIGN_STACK "subl $4, %%esp\n\t" \
                        "pushl 12(%%eax)\n\t" "pushl 8(%%eax)\n\t" "pushl 4(%%eax)\n\t" \
                        "movl (%%eax), %%eax\n\t" \
                        VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                        : "=a"( _res ) \
                        : "a"( &_argvec[0] ) \
                        : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
      lval = (__typeof__( lval ))_res; \
   } while ( 0 )

#define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \
   do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      __asm__ volatile( VALGRIND_ALIGN_STACK \
                        "pushl 16(%%eax)\n\t" "pushl 12(%%eax)\n\t" \
                        "pushl 8(%%eax)\n\t" "pushl 4(%%eax)\n\t" \
                        "movl (%%eax), %%eax\n\t" \
                        VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                        : "=a"( _res ) \
                        : "a"( &_argvec[0] ) \
                        : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
      lval = (__typeof__( lval ))_res; \
   } while ( 0 )

#define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \
   do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      _argvec[5] = (unsigned long)( arg5 ); \
      __asm__ volatile( VALGRIND_ALIGN_STACK "subl $12, %%esp\n\t" \
                        "pushl 20(%%eax)\n\t" "pushl 16(%%eax)\n\t" "pushl 12(%%eax)\n\t" \
                        "pushl 8(%%eax)\n\t" "pushl 4(%%eax)\n\t" \
                        "movl (%%eax), %%eax\n\t" \
                        VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                        : "=a"( _res ) \
                        : "a"( &_argvec[0] ) \
                        : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
      lval = (__typeof__( lval ))_res; \
   } while ( 0 )

#define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \
   do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      _argvec[5] = (unsigned long)( arg5 ); \
      _argvec[6] = (unsigned long)( arg6 ); \
      __asm__ volatile( VALGRIND_ALIGN_STACK "subl $8, %%esp\n\t" \
                        "pushl 24(%%eax)\n\t" "pushl 20(%%eax)\n\t" "pushl 16(%%eax)\n\t" \
                        "pushl 12(%%eax)\n\t" "pushl 8(%%eax)\n\t" "pushl 4(%%eax)\n\t" \
                        "movl (%%eax), %%eax\n\t" \
                        VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                        : "=a"( _res ) \
                        : "a"( &_argvec[0] ) \
                        : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
      lval = (__typeof__( lval ))_res; \
   } while ( 0 )

#define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \
   do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      _argvec[5] = (unsigned long)( arg5 ); \
      _argvec[6] = (unsigned long)( arg6 ); \
      _argvec[7] = (unsigned long)( arg7 ); \
      __asm__ volatile( VALGRIND_ALIGN_STACK "subl $4, %%esp\n\t" \
                        "pushl 28(%%eax)\n\t" "pushl 24(%%eax)\n\t" "pushl 20(%%eax)\n\t" \
                        "pushl 16(%%eax)\n\t" "pushl 12(%%eax)\n\t" "pushl 8(%%eax)\n\t" \
                        "pushl 4(%%eax)\n\t" \
                        "movl (%%eax), %%eax\n\t" \
                        VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                        : "=a"( _res ) \
                        : "a"( &_argvec[0] ) \
                        : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
      lval = (__typeof__( lval ))_res; \
   } while ( 0 )

#define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \
   do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      _argvec[5] = (unsigned long)( arg5 ); \
      _argvec[6] = (unsigned long)( arg6 ); \
      _argvec[7] = (unsigned long)( arg7 ); \
      _argvec[8] = (unsigned long)( arg8 ); \
      __asm__ volatile( VALGRIND_ALIGN_STACK \
                        "pushl 32(%%eax)\n\t" "pushl 28(%%eax)\n\t" "pushl 24(%%eax)\n\t" \
                        "pushl 20(%%eax)\n\t" "pushl 16(%%eax)\n\t" "pushl 12(%%eax)\n\t" \
                        "pushl 8(%%eax)\n\t" "pushl 4(%%eax)\n\t" \
                        "movl (%%eax), %%eax\n\t" \
                        VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                        : "=a"( _res ) \
                        : "a"( &_argvec[0] ) \
                        : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
      lval = (__typeof__( lval ))_res; \
   } while ( 0 )

#define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \
   do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      _argvec[5] = (unsigned long)( arg5 ); \
      _argvec[6] = (unsigned long)( arg6 ); \
      _argvec[7] = (unsigned long)( arg7 ); \
      _argvec[8] = (unsigned long)( arg8 ); \
      _argvec[9] = (unsigned long)( arg9 ); \
      __asm__ volatile( VALGRIND_ALIGN_STACK "subl $12, %%esp\n\t" \
                        "pushl 36(%%eax)\n\t" "pushl 32(%%eax)\n\t" "pushl 28(%%eax)\n\t" \
                        "pushl 24(%%eax)\n\t" "pushl 20(%%eax)\n\t" "pushl 16(%%eax)\n\t" \
                        "pushl 12(%%eax)\n\t" "pushl 8(%%eax)\n\t" "pushl 4(%%eax)\n\t" \
                        "movl (%%eax), %%eax\n\t" \
                        VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                        : "=a"( _res ) \
                        : "a"( &_argvec[0] ) \
                        : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
      lval = (__typeof__( lval ))_res; \
   } while ( 0 )

#define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \
   do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      _argvec[5] = (unsigned long)( arg5 ); \
      _argvec[6] = (unsigned long)( arg6 ); \
      _argvec[7] = (unsigned long)( arg7 ); \
      _argvec[8] = (unsigned long)( arg8 ); \
      _argvec[9] = (unsigned long)( arg9 ); \
      _argvec[10] = (unsigned long)( arg10 ); \
      __asm__ volatile( VALGRIND_ALIGN_STACK "subl $8, %%esp\n\t" \
                        "pushl 40(%%eax)\n\t" "pushl 36(%%eax)\n\t" "pushl 32(%%eax)\n\t" \
                        "pushl 28(%%eax)\n\t" "pushl 24(%%eax)\n\t" "pushl 20(%%eax)\n\t" \
                        "pushl 16(%%eax)\n\t" "pushl 12(%%eax)\n\t" "pushl 8(%%eax)\n\t" \
                        "pushl 4(%%eax)\n\t" \
                        "movl (%%eax), %%eax\n\t" \
                        VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                        : "=a"( _res ) \
                        : "a"( &_argvec[0] ) \
                        : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
      lval = (__typeof__( lval ))_res; \
   } while ( 0 )

#define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \
   do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      _argvec[5] = (unsigned long)( arg5 ); \
      _argvec[6] = (unsigned long)( arg6 ); \
      _argvec[7] = (unsigned long)( arg7 ); \
      _argvec[8] = (unsigned long)( arg8 ); \
      _argvec[9] = (unsigned long)( arg9 ); \
      _argvec[10] = (unsigned long)( arg10 ); \
      _argvec[11] = (unsigned long)( arg11 ); \
      __asm__ volatile( VALGRIND_ALIGN_STACK "subl $4, %%esp\n\t" \
                        "pushl 44(%%eax)\n\t" "pushl 40(%%eax)\n\t" "pushl 36(%%eax)\n\t" \
                        "pushl 32(%%eax)\n\t" "pushl 28(%%eax)\n\t" "pushl 24(%%eax)\n\t" \
                        "pushl 20(%%eax)\n\t" "pushl 16(%%eax)\n\t" "pushl 12(%%eax)\n\t" \
                        "pushl 8(%%eax)\n\t" "pushl 4(%%eax)\n\t" \
                        "movl (%%eax), %%eax\n\t" \
                        VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                        : "=a"( _res ) \
                        : "a"( &_argvec[0] ) \
                        : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
      lval = (__typeof__( lval ))_res; \
   } while ( 0 )

#define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \
   do { \
      volatile OrigFn _orig = ( orig ); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)( arg1 ); \
      _argvec[2] = (unsigned long)( arg2 ); \
      _argvec[3] = (unsigned long)( arg3 ); \
      _argvec[4] = (unsigned long)( arg4 ); \
      _argvec[5] = (unsigned long)( arg5 ); \
      _argvec[6] = (unsigned long)( arg6 ); \
      _argvec[7] = (unsigned long)( arg7 ); \
      _argvec[8] = (unsigned long)( arg8 ); \
      _argvec[9] = (unsigned long)( arg9 ); \
      _argvec[10] = (unsigned long)( arg10 ); \
      _argvec[11] = (unsigned long)( arg11 ); \
      _argvec[12] = (unsigned long)( arg12 ); \
      __asm__ volatile( VALGRIND_ALIGN_STACK \
                        "pushl 48(%%eax)\n\t" "pushl 44(%%eax)\n\t" "pushl 40(%%eax)\n\t" \
                        "pushl 36(%%eax)\n\t" "pushl 32(%%eax)\n\t" "pushl 28(%%eax)\n\t" \
                        "pushl 24(%%eax)\n\t" "pushl 20(%%eax)\n\t" "pushl 16(%%eax)\n\t" \
                        "pushl 12(%%eax)\n\t" "pushl 8(%%eax)\n\t" "pushl 4(%%eax)\n\t" \
                        "movl (%%eax), %%eax\n\t" \
                        VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
                        : "=a"( _res ) \
                        : "a"( &_argvec[0] ) \
                        : "cc", "memory", __CALLER_SAVED_REGS, "edi" ); \
      lval = (__typeof__( lval ))_res; \
   } while ( 0 )

#endif /* PLAT_x86_linux || PLAT_x86_darwin */
"r9", "r10", "r11" 1334 #if defined( __GNUC__ ) && defined( __GCC_HAVE_DWARF2_CFI_ASM ) 1335 #define __FRAME_POINTER , "r"( __builtin_dwarf_cfa() ) 1336 #define VALGRIND_CFI_PROLOGUE \ 1337 "movq %%rbp, %%r15\n\t" \ 1338 "movq %2, %%rbp\n\t" \ 1339 ".cfi_remember_state\n\t" \ 1340 ".cfi_def_cfa rbp, 0\n\t" 1341 #define VALGRIND_CFI_EPILOGUE \ 1342 "movq %%r15, %%rbp\n\t" \ 1343 ".cfi_restore_state\n\t" 1345 #define __FRAME_POINTER 1346 #define VALGRIND_CFI_PROLOGUE 1347 #define VALGRIND_CFI_EPILOGUE 1355 #define VALGRIND_ALIGN_STACK \ 1356 "movq %%rsp,%%r14\n\t" \ 1357 "andq $0xfffffffffffffff0,%%rsp\n\t" 1358 #define VALGRIND_RESTORE_STACK "movq %%r14,%%rsp\n\t" 1384 #define CALL_FN_W_v( lval, orig ) \ 1386 volatile OrigFn _orig = ( orig ); \ 1387 volatile unsigned long _argvec[1]; \ 1388 volatile unsigned long _res; \ 1389 _argvec[0] = (unsigned long)_orig.nraddr; \ 1390 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \ 1391 "movq (%%rax), %%rax\n\t" \ 1392 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \ 1394 : "a"(&_argvec[0])__FRAME_POINTER \ 1395 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \ 1396 lval = (__typeof__( lval ))_res; \ 1399 #define CALL_FN_W_W( lval, orig, arg1 ) \ 1401 volatile OrigFn _orig = ( orig ); \ 1402 volatile unsigned long _argvec[2]; \ 1403 volatile unsigned long _res; \ 1404 _argvec[0] = (unsigned long)_orig.nraddr; \ 1405 _argvec[1] = (unsigned long)( arg1 ); \ 1406 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \ 1407 "movq 8(%%rax), %%rdi\n\t" \ 1408 "movq (%%rax), %%rax\n\t" \ 1409 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \ 1411 : "a"(&_argvec[0])__FRAME_POINTER \ 1412 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \ 1413 lval = (__typeof__( lval ))_res; \ 1416 #define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \ 1418 volatile OrigFn _orig = ( orig ); \ 1419 volatile unsigned long _argvec[3]; \ 
1420 volatile unsigned long _res; \ 1421 _argvec[0] = (unsigned long)_orig.nraddr; \ 1422 _argvec[1] = (unsigned long)( arg1 ); \ 1423 _argvec[2] = (unsigned long)( arg2 ); \ 1424 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \ 1425 "movq 16(%%rax), %%rsi\n\t" \ 1426 "movq 8(%%rax), %%rdi\n\t" \ 1427 "movq (%%rax), %%rax\n\t" \ 1428 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \ 1430 : "a"(&_argvec[0])__FRAME_POINTER \ 1431 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \ 1432 lval = (__typeof__( lval ))_res; \ 1435 #define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \ 1437 volatile OrigFn _orig = ( orig ); \ 1438 volatile unsigned long _argvec[4]; \ 1439 volatile unsigned long _res; \ 1440 _argvec[0] = (unsigned long)_orig.nraddr; \ 1441 _argvec[1] = (unsigned long)( arg1 ); \ 1442 _argvec[2] = (unsigned long)( arg2 ); \ 1443 _argvec[3] = (unsigned long)( arg3 ); \ 1444 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \ 1445 "movq 24(%%rax), %%rdx\n\t" \ 1446 "movq 16(%%rax), %%rsi\n\t" \ 1447 "movq 8(%%rax), %%rdi\n\t" \ 1448 "movq (%%rax), %%rax\n\t" \ 1449 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \ 1451 : "a"(&_argvec[0])__FRAME_POINTER \ 1452 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \ 1453 lval = (__typeof__( lval ))_res; \ 1456 #define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \ 1458 volatile OrigFn _orig = ( orig ); \ 1459 volatile unsigned long _argvec[5]; \ 1460 volatile unsigned long _res; \ 1461 _argvec[0] = (unsigned long)_orig.nraddr; \ 1462 _argvec[1] = (unsigned long)( arg1 ); \ 1463 _argvec[2] = (unsigned long)( arg2 ); \ 1464 _argvec[3] = (unsigned long)( arg3 ); \ 1465 _argvec[4] = (unsigned long)( arg4 ); \ 1466 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \ 1467 "movq 32(%%rax), %%rcx\n\t" \ 1468 "movq 24(%%rax), %%rdx\n\t" \ 1469 "movq 16(%%rax), 
%%rsi\n\t" \ 1470 "movq 8(%%rax), %%rdi\n\t" \ 1471 "movq (%%rax), %%rax\n\t" \ 1472 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \ 1474 : "a"(&_argvec[0])__FRAME_POINTER \ 1475 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \ 1476 lval = (__typeof__( lval ))_res; \ 1479 #define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \ 1481 volatile OrigFn _orig = ( orig ); \ 1482 volatile unsigned long _argvec[6]; \ 1483 volatile unsigned long _res; \ 1484 _argvec[0] = (unsigned long)_orig.nraddr; \ 1485 _argvec[1] = (unsigned long)( arg1 ); \ 1486 _argvec[2] = (unsigned long)( arg2 ); \ 1487 _argvec[3] = (unsigned long)( arg3 ); \ 1488 _argvec[4] = (unsigned long)( arg4 ); \ 1489 _argvec[5] = (unsigned long)( arg5 ); \ 1490 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \ 1491 "movq 40(%%rax), %%r8\n\t" \ 1492 "movq 32(%%rax), %%rcx\n\t" \ 1493 "movq 24(%%rax), %%rdx\n\t" \ 1494 "movq 16(%%rax), %%rsi\n\t" \ 1495 "movq 8(%%rax), %%rdi\n\t" \ 1496 "movq (%%rax), %%rax\n\t" \ 1497 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \ 1499 : "a"(&_argvec[0])__FRAME_POINTER \ 1500 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \ 1501 lval = (__typeof__( lval ))_res; \ 1504 #define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \ 1506 volatile OrigFn _orig = ( orig ); \ 1507 volatile unsigned long _argvec[7]; \ 1508 volatile unsigned long _res; \ 1509 _argvec[0] = (unsigned long)_orig.nraddr; \ 1510 _argvec[1] = (unsigned long)( arg1 ); \ 1511 _argvec[2] = (unsigned long)( arg2 ); \ 1512 _argvec[3] = (unsigned long)( arg3 ); \ 1513 _argvec[4] = (unsigned long)( arg4 ); \ 1514 _argvec[5] = (unsigned long)( arg5 ); \ 1515 _argvec[6] = (unsigned long)( arg6 ); \ 1516 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \ 1517 "movq 48(%%rax), %%r9\n\t" \ 1518 "movq 40(%%rax), %%r8\n\t" \ 1519 "movq 32(%%rax), %%rcx\n\t" \ 1520 
"movq 24(%%rax), %%rdx\n\t" \ 1521 "movq 16(%%rax), %%rsi\n\t" \ 1522 "movq 8(%%rax), %%rdi\n\t" \ 1523 "movq (%%rax), %%rax\n\t" \ 1524 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \ 1526 : "a"(&_argvec[0])__FRAME_POINTER \ 1527 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \ 1528 lval = (__typeof__( lval ))_res; \ 1531 #define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \ 1533 volatile OrigFn _orig = ( orig ); \ 1534 volatile unsigned long _argvec[8]; \ 1535 volatile unsigned long _res; \ 1536 _argvec[0] = (unsigned long)_orig.nraddr; \ 1537 _argvec[1] = (unsigned long)( arg1 ); \ 1538 _argvec[2] = (unsigned long)( arg2 ); \ 1539 _argvec[3] = (unsigned long)( arg3 ); \ 1540 _argvec[4] = (unsigned long)( arg4 ); \ 1541 _argvec[5] = (unsigned long)( arg5 ); \ 1542 _argvec[6] = (unsigned long)( arg6 ); \ 1543 _argvec[7] = (unsigned long)( arg7 ); \ 1544 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $136,%%rsp\n\t" \ 1545 "pushq 56(%%rax)\n\t" \ 1546 "movq 48(%%rax), %%r9\n\t" \ 1547 "movq 40(%%rax), %%r8\n\t" \ 1548 "movq 32(%%rax), %%rcx\n\t" \ 1549 "movq 24(%%rax), %%rdx\n\t" \ 1550 "movq 16(%%rax), %%rsi\n\t" \ 1551 "movq 8(%%rax), %%rdi\n\t" \ 1552 "movq (%%rax), %%rax\n\t" \ 1553 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \ 1555 : "a"(&_argvec[0])__FRAME_POINTER \ 1556 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \ 1557 lval = (__typeof__( lval ))_res; \ 1560 #define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \ 1562 volatile OrigFn _orig = ( orig ); \ 1563 volatile unsigned long _argvec[9]; \ 1564 volatile unsigned long _res; \ 1565 _argvec[0] = (unsigned long)_orig.nraddr; \ 1566 _argvec[1] = (unsigned long)( arg1 ); \ 1567 _argvec[2] = (unsigned long)( arg2 ); \ 1568 _argvec[3] = (unsigned long)( arg3 ); \ 1569 _argvec[4] = (unsigned long)( arg4 ); \ 1570 _argvec[5] = (unsigned long)( arg5 ); \ 1571 _argvec[6] 
= (unsigned long)( arg6 ); \ 1572 _argvec[7] = (unsigned long)( arg7 ); \ 1573 _argvec[8] = (unsigned long)( arg8 ); \ 1574 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \ 1575 "pushq 64(%%rax)\n\t" \ 1576 "pushq 56(%%rax)\n\t" \ 1577 "movq 48(%%rax), %%r9\n\t" \ 1578 "movq 40(%%rax), %%r8\n\t" \ 1579 "movq 32(%%rax), %%rcx\n\t" \ 1580 "movq 24(%%rax), %%rdx\n\t" \ 1581 "movq 16(%%rax), %%rsi\n\t" \ 1582 "movq 8(%%rax), %%rdi\n\t" \ 1583 "movq (%%rax), %%rax\n\t" \ 1584 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \ 1586 : "a"(&_argvec[0])__FRAME_POINTER \ 1587 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \ 1588 lval = (__typeof__( lval ))_res; \ 1591 #define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \ 1593 volatile OrigFn _orig = ( orig ); \ 1594 volatile unsigned long _argvec[10]; \ 1595 volatile unsigned long _res; \ 1596 _argvec[0] = (unsigned long)_orig.nraddr; \ 1597 _argvec[1] = (unsigned long)( arg1 ); \ 1598 _argvec[2] = (unsigned long)( arg2 ); \ 1599 _argvec[3] = (unsigned long)( arg3 ); \ 1600 _argvec[4] = (unsigned long)( arg4 ); \ 1601 _argvec[5] = (unsigned long)( arg5 ); \ 1602 _argvec[6] = (unsigned long)( arg6 ); \ 1603 _argvec[7] = (unsigned long)( arg7 ); \ 1604 _argvec[8] = (unsigned long)( arg8 ); \ 1605 _argvec[9] = (unsigned long)( arg9 ); \ 1606 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $136,%%rsp\n\t" \ 1607 "pushq 72(%%rax)\n\t" \ 1608 "pushq 64(%%rax)\n\t" \ 1609 "pushq 56(%%rax)\n\t" \ 1610 "movq 48(%%rax), %%r9\n\t" \ 1611 "movq 40(%%rax), %%r8\n\t" \ 1612 "movq 32(%%rax), %%rcx\n\t" \ 1613 "movq 24(%%rax), %%rdx\n\t" \ 1614 "movq 16(%%rax), %%rsi\n\t" \ 1615 "movq 8(%%rax), %%rdi\n\t" \ 1616 "movq (%%rax), %%rax\n\t" \ 1617 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \ 1619 : "a"(&_argvec[0])__FRAME_POINTER \ 1620 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \ 
1621 lval = (__typeof__( lval ))_res; \ 1624 #define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \ 1626 volatile OrigFn _orig = ( orig ); \ 1627 volatile unsigned long _argvec[11]; \ 1628 volatile unsigned long _res; \ 1629 _argvec[0] = (unsigned long)_orig.nraddr; \ 1630 _argvec[1] = (unsigned long)( arg1 ); \ 1631 _argvec[2] = (unsigned long)( arg2 ); \ 1632 _argvec[3] = (unsigned long)( arg3 ); \ 1633 _argvec[4] = (unsigned long)( arg4 ); \ 1634 _argvec[5] = (unsigned long)( arg5 ); \ 1635 _argvec[6] = (unsigned long)( arg6 ); \ 1636 _argvec[7] = (unsigned long)( arg7 ); \ 1637 _argvec[8] = (unsigned long)( arg8 ); \ 1638 _argvec[9] = (unsigned long)( arg9 ); \ 1639 _argvec[10] = (unsigned long)( arg10 ); \ 1640 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \ 1641 "pushq 80(%%rax)\n\t" \ 1642 "pushq 72(%%rax)\n\t" \ 1643 "pushq 64(%%rax)\n\t" \ 1644 "pushq 56(%%rax)\n\t" \ 1645 "movq 48(%%rax), %%r9\n\t" \ 1646 "movq 40(%%rax), %%r8\n\t" \ 1647 "movq 32(%%rax), %%rcx\n\t" \ 1648 "movq 24(%%rax), %%rdx\n\t" \ 1649 "movq 16(%%rax), %%rsi\n\t" \ 1650 "movq 8(%%rax), %%rdi\n\t" \ 1651 "movq (%%rax), %%rax\n\t" \ 1652 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \ 1654 : "a"(&_argvec[0])__FRAME_POINTER \ 1655 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \ 1656 lval = (__typeof__( lval ))_res; \ 1659 #define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \ 1661 volatile OrigFn _orig = ( orig ); \ 1662 volatile unsigned long _argvec[12]; \ 1663 volatile unsigned long _res; \ 1664 _argvec[0] = (unsigned long)_orig.nraddr; \ 1665 _argvec[1] = (unsigned long)( arg1 ); \ 1666 _argvec[2] = (unsigned long)( arg2 ); \ 1667 _argvec[3] = (unsigned long)( arg3 ); \ 1668 _argvec[4] = (unsigned long)( arg4 ); \ 1669 _argvec[5] = (unsigned long)( arg5 ); \ 1670 _argvec[6] = (unsigned long)( arg6 ); \ 1671 _argvec[7] = 
(unsigned long)( arg7 ); \ 1672 _argvec[8] = (unsigned long)( arg8 ); \ 1673 _argvec[9] = (unsigned long)( arg9 ); \ 1674 _argvec[10] = (unsigned long)( arg10 ); \ 1675 _argvec[11] = (unsigned long)( arg11 ); \ 1676 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $136,%%rsp\n\t" \ 1677 "pushq 88(%%rax)\n\t" \ 1678 "pushq 80(%%rax)\n\t" \ 1679 "pushq 72(%%rax)\n\t" \ 1680 "pushq 64(%%rax)\n\t" \ 1681 "pushq 56(%%rax)\n\t" \ 1682 "movq 48(%%rax), %%r9\n\t" \ 1683 "movq 40(%%rax), %%r8\n\t" \ 1684 "movq 32(%%rax), %%rcx\n\t" \ 1685 "movq 24(%%rax), %%rdx\n\t" \ 1686 "movq 16(%%rax), %%rsi\n\t" \ 1687 "movq 8(%%rax), %%rdi\n\t" \ 1688 "movq (%%rax), %%rax\n\t" \ 1689 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \ 1691 : "a"(&_argvec[0])__FRAME_POINTER \ 1692 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \ 1693 lval = (__typeof__( lval ))_res; \ 1696 #define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \ 1698 volatile OrigFn _orig = ( orig ); \ 1699 volatile unsigned long _argvec[13]; \ 1700 volatile unsigned long _res; \ 1701 _argvec[0] = (unsigned long)_orig.nraddr; \ 1702 _argvec[1] = (unsigned long)( arg1 ); \ 1703 _argvec[2] = (unsigned long)( arg2 ); \ 1704 _argvec[3] = (unsigned long)( arg3 ); \ 1705 _argvec[4] = (unsigned long)( arg4 ); \ 1706 _argvec[5] = (unsigned long)( arg5 ); \ 1707 _argvec[6] = (unsigned long)( arg6 ); \ 1708 _argvec[7] = (unsigned long)( arg7 ); \ 1709 _argvec[8] = (unsigned long)( arg8 ); \ 1710 _argvec[9] = (unsigned long)( arg9 ); \ 1711 _argvec[10] = (unsigned long)( arg10 ); \ 1712 _argvec[11] = (unsigned long)( arg11 ); \ 1713 _argvec[12] = (unsigned long)( arg12 ); \ 1714 __asm__ volatile( VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \ 1715 "pushq 96(%%rax)\n\t" \ 1716 "pushq 88(%%rax)\n\t" \ 1717 "pushq 80(%%rax)\n\t" \ 1718 "pushq 72(%%rax)\n\t" \ 1719 "pushq 64(%%rax)\n\t" \ 1720 "pushq 
56(%%rax)\n\t" \ 1721 "movq 48(%%rax), %%r9\n\t" \ 1722 "movq 40(%%rax), %%r8\n\t" \ 1723 "movq 32(%%rax), %%rcx\n\t" \ 1724 "movq 24(%%rax), %%rdx\n\t" \ 1725 "movq 16(%%rax), %%rsi\n\t" \ 1726 "movq 8(%%rax), %%rdi\n\t" \ 1727 "movq (%%rax), %%rax\n\t" \ 1728 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \ 1730 : "a"(&_argvec[0])__FRAME_POINTER \ 1731 : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" ); \ 1732 lval = (__typeof__( lval ))_res; \ 1739 #if defined( PLAT_ppc32_linux ) 1765 #define __CALLER_SAVED_REGS \ 1766 "lr", "ctr", "xer", "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", "r0", "r2", "r3", "r4", "r5", "r6", \ 1767 "r7", "r8", "r9", "r10", "r11", "r12", "r13" 1774 #define VALGRIND_ALIGN_STACK \ 1776 "rlwinm 1,1,0,0,27\n\t" 1777 #define VALGRIND_RESTORE_STACK "mr 1,28\n\t" 1782 #define CALL_FN_W_v( lval, orig ) \ 1784 volatile OrigFn _orig = ( orig ); \ 1785 volatile unsigned long _argvec[1]; \ 1786 volatile unsigned long _res; \ 1787 _argvec[0] = (unsigned long)_orig.nraddr; \ 1788 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 1789 "lwz 11,0(11)\n\t" \ 1790 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \ 1792 : "r"( &_argvec[0] ) \ 1793 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 1794 lval = (__typeof__( lval ))_res; \ 1797 #define CALL_FN_W_W( lval, orig, arg1 ) \ 1799 volatile OrigFn _orig = ( orig ); \ 1800 volatile unsigned long _argvec[2]; \ 1801 volatile unsigned long _res; \ 1802 _argvec[0] = (unsigned long)_orig.nraddr; \ 1803 _argvec[1] = (unsigned long)arg1; \ 1804 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 1806 "lwz 11,0(11)\n\t" \ 1807 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \ 1809 : "r"( &_argvec[0] ) \ 1810 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 1811 lval = (__typeof__( lval ))_res; \ 1814 #define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \ 1816 volatile OrigFn _orig = ( orig ); \ 1817 volatile unsigned 
long _argvec[3]; \ 1818 volatile unsigned long _res; \ 1819 _argvec[0] = (unsigned long)_orig.nraddr; \ 1820 _argvec[1] = (unsigned long)arg1; \ 1821 _argvec[2] = (unsigned long)arg2; \ 1822 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 1825 "lwz 11,0(11)\n\t" \ 1826 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \ 1828 : "r"( &_argvec[0] ) \ 1829 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 1830 lval = (__typeof__( lval ))_res; \ 1833 #define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \ 1835 volatile OrigFn _orig = ( orig ); \ 1836 volatile unsigned long _argvec[4]; \ 1837 volatile unsigned long _res; \ 1838 _argvec[0] = (unsigned long)_orig.nraddr; \ 1839 _argvec[1] = (unsigned long)arg1; \ 1840 _argvec[2] = (unsigned long)arg2; \ 1841 _argvec[3] = (unsigned long)arg3; \ 1842 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 1845 "lwz 5,12(11)\n\t" \ 1846 "lwz 11,0(11)\n\t" \ 1847 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \ 1849 : "r"( &_argvec[0] ) \ 1850 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 1851 lval = (__typeof__( lval ))_res; \ 1854 #define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \ 1856 volatile OrigFn _orig = ( orig ); \ 1857 volatile unsigned long _argvec[5]; \ 1858 volatile unsigned long _res; \ 1859 _argvec[0] = (unsigned long)_orig.nraddr; \ 1860 _argvec[1] = (unsigned long)arg1; \ 1861 _argvec[2] = (unsigned long)arg2; \ 1862 _argvec[3] = (unsigned long)arg3; \ 1863 _argvec[4] = (unsigned long)arg4; \ 1864 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 1867 "lwz 5,12(11)\n\t" \ 1868 "lwz 6,16(11)\n\t" \ 1869 "lwz 11,0(11)\n\t" \ 1870 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \ 1872 : "r"( &_argvec[0] ) \ 1873 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 1874 lval = (__typeof__( lval ))_res; \ 1877 #define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \ 1879 volatile OrigFn _orig = ( orig ); \ 
1880 volatile unsigned long _argvec[6]; \ 1881 volatile unsigned long _res; \ 1882 _argvec[0] = (unsigned long)_orig.nraddr; \ 1883 _argvec[1] = (unsigned long)arg1; \ 1884 _argvec[2] = (unsigned long)arg2; \ 1885 _argvec[3] = (unsigned long)arg3; \ 1886 _argvec[4] = (unsigned long)arg4; \ 1887 _argvec[5] = (unsigned long)arg5; \ 1888 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 1891 "lwz 5,12(11)\n\t" \ 1892 "lwz 6,16(11)\n\t" \ 1893 "lwz 7,20(11)\n\t" \ 1894 "lwz 11,0(11)\n\t" \ 1895 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \ 1897 : "r"( &_argvec[0] ) \ 1898 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 1899 lval = (__typeof__( lval ))_res; \ 1902 #define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \ 1904 volatile OrigFn _orig = ( orig ); \ 1905 volatile unsigned long _argvec[7]; \ 1906 volatile unsigned long _res; \ 1907 _argvec[0] = (unsigned long)_orig.nraddr; \ 1908 _argvec[1] = (unsigned long)arg1; \ 1909 _argvec[2] = (unsigned long)arg2; \ 1910 _argvec[3] = (unsigned long)arg3; \ 1911 _argvec[4] = (unsigned long)arg4; \ 1912 _argvec[5] = (unsigned long)arg5; \ 1913 _argvec[6] = (unsigned long)arg6; \ 1914 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 1917 "lwz 5,12(11)\n\t" \ 1918 "lwz 6,16(11)\n\t" \ 1919 "lwz 7,20(11)\n\t" \ 1920 "lwz 8,24(11)\n\t" \ 1921 "lwz 11,0(11)\n\t" \ 1922 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \ 1924 : "r"( &_argvec[0] ) \ 1925 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 1926 lval = (__typeof__( lval ))_res; \ 1929 #define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \ 1931 volatile OrigFn _orig = ( orig ); \ 1932 volatile unsigned long _argvec[8]; \ 1933 volatile unsigned long _res; \ 1934 _argvec[0] = (unsigned long)_orig.nraddr; \ 1935 _argvec[1] = (unsigned long)arg1; \ 1936 _argvec[2] = (unsigned long)arg2; \ 1937 _argvec[3] = (unsigned long)arg3; \ 1938 _argvec[4] = (unsigned long)arg4; 
\ 1939 _argvec[5] = (unsigned long)arg5; \ 1940 _argvec[6] = (unsigned long)arg6; \ 1941 _argvec[7] = (unsigned long)arg7; \ 1942 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 1945 "lwz 5,12(11)\n\t" \ 1946 "lwz 6,16(11)\n\t" \ 1947 "lwz 7,20(11)\n\t" \ 1948 "lwz 8,24(11)\n\t" \ 1949 "lwz 9,28(11)\n\t" \ 1950 "lwz 11,0(11)\n\t" \ 1951 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \ 1953 : "r"( &_argvec[0] ) \ 1954 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 1955 lval = (__typeof__( lval ))_res; \ 1958 #define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \ 1960 volatile OrigFn _orig = ( orig ); \ 1961 volatile unsigned long _argvec[9]; \ 1962 volatile unsigned long _res; \ 1963 _argvec[0] = (unsigned long)_orig.nraddr; \ 1964 _argvec[1] = (unsigned long)arg1; \ 1965 _argvec[2] = (unsigned long)arg2; \ 1966 _argvec[3] = (unsigned long)arg3; \ 1967 _argvec[4] = (unsigned long)arg4; \ 1968 _argvec[5] = (unsigned long)arg5; \ 1969 _argvec[6] = (unsigned long)arg6; \ 1970 _argvec[7] = (unsigned long)arg7; \ 1971 _argvec[8] = (unsigned long)arg8; \ 1972 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 1975 "lwz 5,12(11)\n\t" \ 1976 "lwz 6,16(11)\n\t" \ 1977 "lwz 7,20(11)\n\t" \ 1978 "lwz 8,24(11)\n\t" \ 1979 "lwz 9,28(11)\n\t" \ 1980 "lwz 10,32(11)\n\t" \ 1981 "lwz 11,0(11)\n\t" \ 1982 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \ 1984 : "r"( &_argvec[0] ) \ 1985 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 1986 lval = (__typeof__( lval ))_res; \ 1989 #define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \ 1991 volatile OrigFn _orig = ( orig ); \ 1992 volatile unsigned long _argvec[10]; \ 1993 volatile unsigned long _res; \ 1994 _argvec[0] = (unsigned long)_orig.nraddr; \ 1995 _argvec[1] = (unsigned long)arg1; \ 1996 _argvec[2] = (unsigned long)arg2; \ 1997 _argvec[3] = (unsigned long)arg3; \ 1998 _argvec[4] = (unsigned 
long)arg4; \ 1999 _argvec[5] = (unsigned long)arg5; \ 2000 _argvec[6] = (unsigned long)arg6; \ 2001 _argvec[7] = (unsigned long)arg7; \ 2002 _argvec[8] = (unsigned long)arg8; \ 2003 _argvec[9] = (unsigned long)arg9; \ 2004 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2005 "addi 1,1,-16\n\t" \ 2006 "lwz 3,36(11)\n\t" \ 2010 "lwz 5,12(11)\n\t" \ 2011 "lwz 6,16(11)\n\t" \ 2012 "lwz 7,20(11)\n\t" \ 2013 "lwz 8,24(11)\n\t" \ 2014 "lwz 9,28(11)\n\t" \ 2015 "lwz 10,32(11)\n\t" \ 2016 "lwz 11,0(11)\n\t" \ 2017 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \ 2019 : "r"( &_argvec[0] ) \ 2020 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2021 lval = (__typeof__( lval ))_res; \ 2024 #define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \ 2026 volatile OrigFn _orig = ( orig ); \ 2027 volatile unsigned long _argvec[11]; \ 2028 volatile unsigned long _res; \ 2029 _argvec[0] = (unsigned long)_orig.nraddr; \ 2030 _argvec[1] = (unsigned long)arg1; \ 2031 _argvec[2] = (unsigned long)arg2; \ 2032 _argvec[3] = (unsigned long)arg3; \ 2033 _argvec[4] = (unsigned long)arg4; \ 2034 _argvec[5] = (unsigned long)arg5; \ 2035 _argvec[6] = (unsigned long)arg6; \ 2036 _argvec[7] = (unsigned long)arg7; \ 2037 _argvec[8] = (unsigned long)arg8; \ 2038 _argvec[9] = (unsigned long)arg9; \ 2039 _argvec[10] = (unsigned long)arg10; \ 2040 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2041 "addi 1,1,-16\n\t" \ 2042 "lwz 3,40(11)\n\t" \ 2044 "lwz 3,36(11)\n\t" \ 2048 "lwz 5,12(11)\n\t" \ 2049 "lwz 6,16(11)\n\t" \ 2050 "lwz 7,20(11)\n\t" \ 2051 "lwz 8,24(11)\n\t" \ 2052 "lwz 9,28(11)\n\t" \ 2053 "lwz 10,32(11)\n\t" \ 2054 "lwz 11,0(11)\n\t" \ 2055 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \ 2057 : "r"( &_argvec[0] ) \ 2058 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2059 lval = (__typeof__( lval ))_res; \ 2062 #define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, 
arg6, arg7, arg8, arg9, arg10, arg11 ) \ 2064 volatile OrigFn _orig = ( orig ); \ 2065 volatile unsigned long _argvec[12]; \ 2066 volatile unsigned long _res; \ 2067 _argvec[0] = (unsigned long)_orig.nraddr; \ 2068 _argvec[1] = (unsigned long)arg1; \ 2069 _argvec[2] = (unsigned long)arg2; \ 2070 _argvec[3] = (unsigned long)arg3; \ 2071 _argvec[4] = (unsigned long)arg4; \ 2072 _argvec[5] = (unsigned long)arg5; \ 2073 _argvec[6] = (unsigned long)arg6; \ 2074 _argvec[7] = (unsigned long)arg7; \ 2075 _argvec[8] = (unsigned long)arg8; \ 2076 _argvec[9] = (unsigned long)arg9; \ 2077 _argvec[10] = (unsigned long)arg10; \ 2078 _argvec[11] = (unsigned long)arg11; \ 2079 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2080 "addi 1,1,-32\n\t" \ 2081 "lwz 3,44(11)\n\t" \ 2083 "lwz 3,40(11)\n\t" \ 2085 "lwz 3,36(11)\n\t" \ 2089 "lwz 5,12(11)\n\t" \ 2090 "lwz 6,16(11)\n\t" \ 2091 "lwz 7,20(11)\n\t" \ 2092 "lwz 8,24(11)\n\t" \ 2093 "lwz 9,28(11)\n\t" \ 2094 "lwz 10,32(11)\n\t" \ 2095 "lwz 11,0(11)\n\t" \ 2096 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \ 2098 : "r"( &_argvec[0] ) \ 2099 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2100 lval = (__typeof__( lval ))_res; \ 2103 #define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \ 2105 volatile OrigFn _orig = ( orig ); \ 2106 volatile unsigned long _argvec[13]; \ 2107 volatile unsigned long _res; \ 2108 _argvec[0] = (unsigned long)_orig.nraddr; \ 2109 _argvec[1] = (unsigned long)arg1; \ 2110 _argvec[2] = (unsigned long)arg2; \ 2111 _argvec[3] = (unsigned long)arg3; \ 2112 _argvec[4] = (unsigned long)arg4; \ 2113 _argvec[5] = (unsigned long)arg5; \ 2114 _argvec[6] = (unsigned long)arg6; \ 2115 _argvec[7] = (unsigned long)arg7; \ 2116 _argvec[8] = (unsigned long)arg8; \ 2117 _argvec[9] = (unsigned long)arg9; \ 2118 _argvec[10] = (unsigned long)arg10; \ 2119 _argvec[11] = (unsigned long)arg11; \ 2120 _argvec[12] = (unsigned long)arg12; 
\ 2121 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2122 "addi 1,1,-32\n\t" \ 2123 "lwz 3,48(11)\n\t" \ 2125 "lwz 3,44(11)\n\t" \ 2127 "lwz 3,40(11)\n\t" \ 2129 "lwz 3,36(11)\n\t" \ 2133 "lwz 5,12(11)\n\t" \ 2134 "lwz 6,16(11)\n\t" \ 2135 "lwz 7,20(11)\n\t" \ 2136 "lwz 8,24(11)\n\t" \ 2137 "lwz 9,28(11)\n\t" \ 2138 "lwz 10,32(11)\n\t" \ 2139 "lwz 11,0(11)\n\t" \ 2140 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \ 2142 : "r"( &_argvec[0] ) \ 2143 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2144 lval = (__typeof__( lval ))_res; \ 2151 #if defined( PLAT_ppc64_linux ) 2156 #define __CALLER_SAVED_REGS \ 2157 "lr", "ctr", "xer", "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", "r0", "r2", "r3", "r4", "r5", "r6", \ 2158 "r7", "r8", "r9", "r10", "r11", "r12", "r13" 2165 #define VALGRIND_ALIGN_STACK \ 2167 "rldicr 1,1,0,59\n\t" 2168 #define VALGRIND_RESTORE_STACK "mr 1,28\n\t" 2173 #define CALL_FN_W_v( lval, orig ) \ 2175 volatile OrigFn _orig = ( orig ); \ 2176 volatile unsigned long _argvec[3 + 0]; \ 2177 volatile unsigned long _res; \ 2179 _argvec[1] = (unsigned long)_orig.r2; \ 2180 _argvec[2] = (unsigned long)_orig.nraddr; \ 2181 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2182 "std 2,-16(11)\n\t" \ 2184 "ld 11, 0(11)\n\t" \ 2185 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \ 2187 "ld 2,-16(11)\n\t" \ 2188 VALGRIND_RESTORE_STACK \ 2190 : "r"( &_argvec[2] ) \ 2191 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2192 lval = (__typeof__( lval ))_res; \ 2195 #define CALL_FN_W_W( lval, orig, arg1 ) \ 2197 volatile OrigFn _orig = ( orig ); \ 2198 volatile unsigned long _argvec[3 + 1]; \ 2199 volatile unsigned long _res; \ 2201 _argvec[1] = (unsigned long)_orig.r2; \ 2202 _argvec[2] = (unsigned long)_orig.nraddr; \ 2203 _argvec[2 + 1] = (unsigned long)arg1; \ 2204 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2205 "std 2,-16(11)\n\t" \ 2208 "ld 11, 0(11)\n\t" \ 2209 
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \ 2211 "ld 2,-16(11)\n\t" \ 2212 VALGRIND_RESTORE_STACK \ 2214 : "r"( &_argvec[2] ) \ 2215 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2216 lval = (__typeof__( lval ))_res; \ 2219 #define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \ 2221 volatile OrigFn _orig = ( orig ); \ 2222 volatile unsigned long _argvec[3 + 2]; \ 2223 volatile unsigned long _res; \ 2225 _argvec[1] = (unsigned long)_orig.r2; \ 2226 _argvec[2] = (unsigned long)_orig.nraddr; \ 2227 _argvec[2 + 1] = (unsigned long)arg1; \ 2228 _argvec[2 + 2] = (unsigned long)arg2; \ 2229 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2230 "std 2,-16(11)\n\t" \ 2233 "ld 4, 16(11)\n\t" \ 2234 "ld 11, 0(11)\n\t" \ 2235 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \ 2237 "ld 2,-16(11)\n\t" \ 2238 VALGRIND_RESTORE_STACK \ 2240 : "r"( &_argvec[2] ) \ 2241 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2242 lval = (__typeof__( lval ))_res; \ 2245 #define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \ 2247 volatile OrigFn _orig = ( orig ); \ 2248 volatile unsigned long _argvec[3 + 3]; \ 2249 volatile unsigned long _res; \ 2251 _argvec[1] = (unsigned long)_orig.r2; \ 2252 _argvec[2] = (unsigned long)_orig.nraddr; \ 2253 _argvec[2 + 1] = (unsigned long)arg1; \ 2254 _argvec[2 + 2] = (unsigned long)arg2; \ 2255 _argvec[2 + 3] = (unsigned long)arg3; \ 2256 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2257 "std 2,-16(11)\n\t" \ 2260 "ld 4, 16(11)\n\t" \ 2261 "ld 5, 24(11)\n\t" \ 2262 "ld 11, 0(11)\n\t" \ 2263 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \ 2265 "ld 2,-16(11)\n\t" \ 2266 VALGRIND_RESTORE_STACK \ 2268 : "r"( &_argvec[2] ) \ 2269 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2270 lval = (__typeof__( lval ))_res; \ 2273 #define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \ 2275 volatile OrigFn _orig = ( orig ); \ 2276 volatile unsigned long _argvec[3 + 4]; \ 2277 volatile unsigned long _res; \ 2279 
_argvec[1] = (unsigned long)_orig.r2; \ 2280 _argvec[2] = (unsigned long)_orig.nraddr; \ 2281 _argvec[2 + 1] = (unsigned long)arg1; \ 2282 _argvec[2 + 2] = (unsigned long)arg2; \ 2283 _argvec[2 + 3] = (unsigned long)arg3; \ 2284 _argvec[2 + 4] = (unsigned long)arg4; \ 2285 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2286 "std 2,-16(11)\n\t" \ 2289 "ld 4, 16(11)\n\t" \ 2290 "ld 5, 24(11)\n\t" \ 2291 "ld 6, 32(11)\n\t" \ 2292 "ld 11, 0(11)\n\t" \ 2293 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \ 2295 "ld 2,-16(11)\n\t" \ 2296 VALGRIND_RESTORE_STACK \ 2298 : "r"( &_argvec[2] ) \ 2299 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2300 lval = (__typeof__( lval ))_res; \ 2303 #define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \ 2305 volatile OrigFn _orig = ( orig ); \ 2306 volatile unsigned long _argvec[3 + 5]; \ 2307 volatile unsigned long _res; \ 2309 _argvec[1] = (unsigned long)_orig.r2; \ 2310 _argvec[2] = (unsigned long)_orig.nraddr; \ 2311 _argvec[2 + 1] = (unsigned long)arg1; \ 2312 _argvec[2 + 2] = (unsigned long)arg2; \ 2313 _argvec[2 + 3] = (unsigned long)arg3; \ 2314 _argvec[2 + 4] = (unsigned long)arg4; \ 2315 _argvec[2 + 5] = (unsigned long)arg5; \ 2316 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2317 "std 2,-16(11)\n\t" \ 2320 "ld 4, 16(11)\n\t" \ 2321 "ld 5, 24(11)\n\t" \ 2322 "ld 6, 32(11)\n\t" \ 2323 "ld 7, 40(11)\n\t" \ 2324 "ld 11, 0(11)\n\t" \ 2325 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \ 2327 "ld 2,-16(11)\n\t" \ 2328 VALGRIND_RESTORE_STACK \ 2330 : "r"( &_argvec[2] ) \ 2331 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2332 lval = (__typeof__( lval ))_res; \ 2335 #define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \ 2337 volatile OrigFn _orig = ( orig ); \ 2338 volatile unsigned long _argvec[3 + 6]; \ 2339 volatile unsigned long _res; \ 2341 _argvec[1] = (unsigned long)_orig.r2; \ 2342 _argvec[2] = (unsigned long)_orig.nraddr; \ 2343 _argvec[2 + 1] = 
(unsigned long)arg1; \ 2344 _argvec[2 + 2] = (unsigned long)arg2; \ 2345 _argvec[2 + 3] = (unsigned long)arg3; \ 2346 _argvec[2 + 4] = (unsigned long)arg4; \ 2347 _argvec[2 + 5] = (unsigned long)arg5; \ 2348 _argvec[2 + 6] = (unsigned long)arg6; \ 2349 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2350 "std 2,-16(11)\n\t" \ 2353 "ld 4, 16(11)\n\t" \ 2354 "ld 5, 24(11)\n\t" \ 2355 "ld 6, 32(11)\n\t" \ 2356 "ld 7, 40(11)\n\t" \ 2357 "ld 8, 48(11)\n\t" \ 2358 "ld 11, 0(11)\n\t" \ 2359 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \ 2361 "ld 2,-16(11)\n\t" \ 2362 VALGRIND_RESTORE_STACK \ 2364 : "r"( &_argvec[2] ) \ 2365 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2366 lval = (__typeof__( lval ))_res; \ 2369 #define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \ 2371 volatile OrigFn _orig = ( orig ); \ 2372 volatile unsigned long _argvec[3 + 7]; \ 2373 volatile unsigned long _res; \ 2375 _argvec[1] = (unsigned long)_orig.r2; \ 2376 _argvec[2] = (unsigned long)_orig.nraddr; \ 2377 _argvec[2 + 1] = (unsigned long)arg1; \ 2378 _argvec[2 + 2] = (unsigned long)arg2; \ 2379 _argvec[2 + 3] = (unsigned long)arg3; \ 2380 _argvec[2 + 4] = (unsigned long)arg4; \ 2381 _argvec[2 + 5] = (unsigned long)arg5; \ 2382 _argvec[2 + 6] = (unsigned long)arg6; \ 2383 _argvec[2 + 7] = (unsigned long)arg7; \ 2384 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2385 "std 2,-16(11)\n\t" \ 2388 "ld 4, 16(11)\n\t" \ 2389 "ld 5, 24(11)\n\t" \ 2390 "ld 6, 32(11)\n\t" \ 2391 "ld 7, 40(11)\n\t" \ 2392 "ld 8, 48(11)\n\t" \ 2393 "ld 9, 56(11)\n\t" \ 2394 "ld 11, 0(11)\n\t" \ 2395 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \ 2397 "ld 2,-16(11)\n\t" \ 2398 VALGRIND_RESTORE_STACK \ 2400 : "r"( &_argvec[2] ) \ 2401 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2402 lval = (__typeof__( lval ))_res; \ 2405 #define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \ 2407 volatile OrigFn _orig = ( orig ); \ 2408 
volatile unsigned long _argvec[3 + 8]; \ 2409 volatile unsigned long _res; \ 2411 _argvec[1] = (unsigned long)_orig.r2; \ 2412 _argvec[2] = (unsigned long)_orig.nraddr; \ 2413 _argvec[2 + 1] = (unsigned long)arg1; \ 2414 _argvec[2 + 2] = (unsigned long)arg2; \ 2415 _argvec[2 + 3] = (unsigned long)arg3; \ 2416 _argvec[2 + 4] = (unsigned long)arg4; \ 2417 _argvec[2 + 5] = (unsigned long)arg5; \ 2418 _argvec[2 + 6] = (unsigned long)arg6; \ 2419 _argvec[2 + 7] = (unsigned long)arg7; \ 2420 _argvec[2 + 8] = (unsigned long)arg8; \ 2421 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2422 "std 2,-16(11)\n\t" \ 2425 "ld 4, 16(11)\n\t" \ 2426 "ld 5, 24(11)\n\t" \ 2427 "ld 6, 32(11)\n\t" \ 2428 "ld 7, 40(11)\n\t" \ 2429 "ld 8, 48(11)\n\t" \ 2430 "ld 9, 56(11)\n\t" \ 2431 "ld 10, 64(11)\n\t" \ 2432 "ld 11, 0(11)\n\t" \ 2433 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \ 2435 "ld 2,-16(11)\n\t" \ 2436 VALGRIND_RESTORE_STACK \ 2438 : "r"( &_argvec[2] ) \ 2439 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2440 lval = (__typeof__( lval ))_res; \ 2443 #define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \ 2445 volatile OrigFn _orig = ( orig ); \ 2446 volatile unsigned long _argvec[3 + 9]; \ 2447 volatile unsigned long _res; \ 2449 _argvec[1] = (unsigned long)_orig.r2; \ 2450 _argvec[2] = (unsigned long)_orig.nraddr; \ 2451 _argvec[2 + 1] = (unsigned long)arg1; \ 2452 _argvec[2 + 2] = (unsigned long)arg2; \ 2453 _argvec[2 + 3] = (unsigned long)arg3; \ 2454 _argvec[2 + 4] = (unsigned long)arg4; \ 2455 _argvec[2 + 5] = (unsigned long)arg5; \ 2456 _argvec[2 + 6] = (unsigned long)arg6; \ 2457 _argvec[2 + 7] = (unsigned long)arg7; \ 2458 _argvec[2 + 8] = (unsigned long)arg8; \ 2459 _argvec[2 + 9] = (unsigned long)arg9; \ 2460 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2461 "std 2,-16(11)\n\t" \ 2463 "addi 1,1,-128\n\t" \ 2465 "std 3,112(1)\n\t" \ 2467 "ld 4, 16(11)\n\t" \ 2468 "ld 5, 24(11)\n\t" \ 2469 "ld 6, 
32(11)\n\t" \ 2470 "ld 7, 40(11)\n\t" \ 2471 "ld 8, 48(11)\n\t" \ 2472 "ld 9, 56(11)\n\t" \ 2473 "ld 10, 64(11)\n\t" \ 2474 "ld 11, 0(11)\n\t" \ 2475 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \ 2477 "ld 2,-16(11)\n\t" \ 2478 VALGRIND_RESTORE_STACK \ 2480 : "r"( &_argvec[2] ) \ 2481 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2482 lval = (__typeof__( lval ))_res; \ 2485 #define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \ 2487 volatile OrigFn _orig = ( orig ); \ 2488 volatile unsigned long _argvec[3 + 10]; \ 2489 volatile unsigned long _res; \ 2491 _argvec[1] = (unsigned long)_orig.r2; \ 2492 _argvec[2] = (unsigned long)_orig.nraddr; \ 2493 _argvec[2 + 1] = (unsigned long)arg1; \ 2494 _argvec[2 + 2] = (unsigned long)arg2; \ 2495 _argvec[2 + 3] = (unsigned long)arg3; \ 2496 _argvec[2 + 4] = (unsigned long)arg4; \ 2497 _argvec[2 + 5] = (unsigned long)arg5; \ 2498 _argvec[2 + 6] = (unsigned long)arg6; \ 2499 _argvec[2 + 7] = (unsigned long)arg7; \ 2500 _argvec[2 + 8] = (unsigned long)arg8; \ 2501 _argvec[2 + 9] = (unsigned long)arg9; \ 2502 _argvec[2 + 10] = (unsigned long)arg10; \ 2503 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2504 "std 2,-16(11)\n\t" \ 2506 "addi 1,1,-128\n\t" \ 2508 "std 3,120(1)\n\t" \ 2510 "std 3,112(1)\n\t" \ 2512 "ld 4, 16(11)\n\t" \ 2513 "ld 5, 24(11)\n\t" \ 2514 "ld 6, 32(11)\n\t" \ 2515 "ld 7, 40(11)\n\t" \ 2516 "ld 8, 48(11)\n\t" \ 2517 "ld 9, 56(11)\n\t" \ 2518 "ld 10, 64(11)\n\t" \ 2519 "ld 11, 0(11)\n\t" \ 2520 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \ 2522 "ld 2,-16(11)\n\t" \ 2523 VALGRIND_RESTORE_STACK \ 2525 : "r"( &_argvec[2] ) \ 2526 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2527 lval = (__typeof__( lval ))_res; \ 2530 #define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \ 2532 volatile OrigFn _orig = ( orig ); \ 2533 volatile unsigned long _argvec[3 + 11]; \ 2534 volatile unsigned 
long _res; \ 2536 _argvec[1] = (unsigned long)_orig.r2; \ 2537 _argvec[2] = (unsigned long)_orig.nraddr; \ 2538 _argvec[2 + 1] = (unsigned long)arg1; \ 2539 _argvec[2 + 2] = (unsigned long)arg2; \ 2540 _argvec[2 + 3] = (unsigned long)arg3; \ 2541 _argvec[2 + 4] = (unsigned long)arg4; \ 2542 _argvec[2 + 5] = (unsigned long)arg5; \ 2543 _argvec[2 + 6] = (unsigned long)arg6; \ 2544 _argvec[2 + 7] = (unsigned long)arg7; \ 2545 _argvec[2 + 8] = (unsigned long)arg8; \ 2546 _argvec[2 + 9] = (unsigned long)arg9; \ 2547 _argvec[2 + 10] = (unsigned long)arg10; \ 2548 _argvec[2 + 11] = (unsigned long)arg11; \ 2549 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2550 "std 2,-16(11)\n\t" \ 2552 "addi 1,1,-144\n\t" \ 2554 "std 3,128(1)\n\t" \ 2556 "std 3,120(1)\n\t" \ 2558 "std 3,112(1)\n\t" \ 2560 "ld 4, 16(11)\n\t" \ 2561 "ld 5, 24(11)\n\t" \ 2562 "ld 6, 32(11)\n\t" \ 2563 "ld 7, 40(11)\n\t" \ 2564 "ld 8, 48(11)\n\t" \ 2565 "ld 9, 56(11)\n\t" \ 2566 "ld 10, 64(11)\n\t" \ 2567 "ld 11, 0(11)\n\t" \ 2568 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \ 2570 "ld 2,-16(11)\n\t" \ 2571 VALGRIND_RESTORE_STACK \ 2573 : "r"( &_argvec[2] ) \ 2574 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2575 lval = (__typeof__( lval ))_res; \ 2578 #define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \ 2580 volatile OrigFn _orig = ( orig ); \ 2581 volatile unsigned long _argvec[3 + 12]; \ 2582 volatile unsigned long _res; \ 2584 _argvec[1] = (unsigned long)_orig.r2; \ 2585 _argvec[2] = (unsigned long)_orig.nraddr; \ 2586 _argvec[2 + 1] = (unsigned long)arg1; \ 2587 _argvec[2 + 2] = (unsigned long)arg2; \ 2588 _argvec[2 + 3] = (unsigned long)arg3; \ 2589 _argvec[2 + 4] = (unsigned long)arg4; \ 2590 _argvec[2 + 5] = (unsigned long)arg5; \ 2591 _argvec[2 + 6] = (unsigned long)arg6; \ 2592 _argvec[2 + 7] = (unsigned long)arg7; \ 2593 _argvec[2 + 8] = (unsigned long)arg8; \ 2594 _argvec[2 + 9] = (unsigned long)arg9; \ 2595 
_argvec[2 + 10] = (unsigned long)arg10; \ 2596 _argvec[2 + 11] = (unsigned long)arg11; \ 2597 _argvec[2 + 12] = (unsigned long)arg12; \ 2598 __asm__ volatile( VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \ 2599 "std 2,-16(11)\n\t" \ 2601 "addi 1,1,-144\n\t" \ 2603 "std 3,136(1)\n\t" \ 2605 "std 3,128(1)\n\t" \ 2607 "std 3,120(1)\n\t" \ 2609 "std 3,112(1)\n\t" \ 2611 "ld 4, 16(11)\n\t" \ 2612 "ld 5, 24(11)\n\t" \ 2613 "ld 6, 32(11)\n\t" \ 2614 "ld 7, 40(11)\n\t" \ 2615 "ld 8, 48(11)\n\t" \ 2616 "ld 9, 56(11)\n\t" \ 2617 "ld 10, 64(11)\n\t" \ 2618 "ld 11, 0(11)\n\t" \ 2619 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \ 2621 "ld 2,-16(11)\n\t" \ 2622 VALGRIND_RESTORE_STACK \ 2624 : "r"( &_argvec[2] ) \ 2625 : "cc", "memory", __CALLER_SAVED_REGS, "r28" ); \ 2626 lval = (__typeof__( lval ))_res; \ 2633 #if defined( PLAT_arm_linux ) 2636 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3", "r4", "r14" 2651 #define VALGRIND_ALIGN_STACK \ 2654 "bic r4, r4, #7\n\t" \ 2656 #define VALGRIND_RESTORE_STACK "mov sp, r10\n\t" 2661 #define CALL_FN_W_v( lval, orig ) \ 2663 volatile OrigFn _orig = ( orig ); \ 2664 volatile unsigned long _argvec[1]; \ 2665 volatile unsigned long _res; \ 2666 _argvec[0] = (unsigned long)_orig.nraddr; \ 2667 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r4, [%1] \n\t" \ 2668 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \ 2670 : "0"( &_argvec[0] ) \ 2671 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \ 2672 lval = (__typeof__( lval ))_res; \ 2675 #define CALL_FN_W_W( lval, orig, arg1 ) \ 2677 volatile OrigFn _orig = ( orig ); \ 2678 volatile unsigned long _argvec[2]; \ 2679 volatile unsigned long _res; \ 2680 _argvec[0] = (unsigned long)_orig.nraddr; \ 2681 _argvec[1] = (unsigned long)( arg1 ); \ 2682 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \ 2683 "ldr r4, [%1] \n\t" \ 2684 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \ 2686 : "0"( &_argvec[0] ) \ 2687 : "cc", 
"memory", __CALLER_SAVED_REGS, "r10" ); \ 2688 lval = (__typeof__( lval ))_res; \ 2691 #define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \ 2693 volatile OrigFn _orig = ( orig ); \ 2694 volatile unsigned long _argvec[3]; \ 2695 volatile unsigned long _res; \ 2696 _argvec[0] = (unsigned long)_orig.nraddr; \ 2697 _argvec[1] = (unsigned long)( arg1 ); \ 2698 _argvec[2] = (unsigned long)( arg2 ); \ 2699 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \ 2700 "ldr r1, [%1, #8] \n\t" \ 2701 "ldr r4, [%1] \n\t" \ 2702 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \ 2704 : "0"( &_argvec[0] ) \ 2705 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \ 2706 lval = (__typeof__( lval ))_res; \ 2709 #define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \ 2711 volatile OrigFn _orig = ( orig ); \ 2712 volatile unsigned long _argvec[4]; \ 2713 volatile unsigned long _res; \ 2714 _argvec[0] = (unsigned long)_orig.nraddr; \ 2715 _argvec[1] = (unsigned long)( arg1 ); \ 2716 _argvec[2] = (unsigned long)( arg2 ); \ 2717 _argvec[3] = (unsigned long)( arg3 ); \ 2718 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \ 2719 "ldr r1, [%1, #8] \n\t" \ 2720 "ldr r2, [%1, #12] \n\t" \ 2721 "ldr r4, [%1] \n\t" \ 2722 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \ 2724 : "0"( &_argvec[0] ) \ 2725 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \ 2726 lval = (__typeof__( lval ))_res; \ 2729 #define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \ 2731 volatile OrigFn _orig = ( orig ); \ 2732 volatile unsigned long _argvec[5]; \ 2733 volatile unsigned long _res; \ 2734 _argvec[0] = (unsigned long)_orig.nraddr; \ 2735 _argvec[1] = (unsigned long)( arg1 ); \ 2736 _argvec[2] = (unsigned long)( arg2 ); \ 2737 _argvec[3] = (unsigned long)( arg3 ); \ 2738 _argvec[4] = (unsigned long)( arg4 ); \ 2739 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \ 2740 "ldr r1, [%1, #8] \n\t" \ 2741 "ldr r2, [%1, 
#12] \n\t" \ 2742 "ldr r3, [%1, #16] \n\t" \ 2743 "ldr r4, [%1] \n\t" \ 2744 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \ 2746 : "0"( &_argvec[0] ) \ 2747 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \ 2748 lval = (__typeof__( lval ))_res; \ 2751 #define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \ 2753 volatile OrigFn _orig = ( orig ); \ 2754 volatile unsigned long _argvec[6]; \ 2755 volatile unsigned long _res; \ 2756 _argvec[0] = (unsigned long)_orig.nraddr; \ 2757 _argvec[1] = (unsigned long)( arg1 ); \ 2758 _argvec[2] = (unsigned long)( arg2 ); \ 2759 _argvec[3] = (unsigned long)( arg3 ); \ 2760 _argvec[4] = (unsigned long)( arg4 ); \ 2761 _argvec[5] = (unsigned long)( arg5 ); \ 2762 __asm__ volatile( VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \ 2763 "ldr r0, [%1, #20] \n\t" \ 2765 "ldr r0, [%1, #4] \n\t" \ 2766 "ldr r1, [%1, #8] \n\t" \ 2767 "ldr r2, [%1, #12] \n\t" \ 2768 "ldr r3, [%1, #16] \n\t" \ 2769 "ldr r4, [%1] \n\t" \ 2770 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \ 2772 : "0"( &_argvec[0] ) \ 2773 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \ 2774 lval = (__typeof__( lval ))_res; \ 2777 #define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \ 2779 volatile OrigFn _orig = ( orig ); \ 2780 volatile unsigned long _argvec[7]; \ 2781 volatile unsigned long _res; \ 2782 _argvec[0] = (unsigned long)_orig.nraddr; \ 2783 _argvec[1] = (unsigned long)( arg1 ); \ 2784 _argvec[2] = (unsigned long)( arg2 ); \ 2785 _argvec[3] = (unsigned long)( arg3 ); \ 2786 _argvec[4] = (unsigned long)( arg4 ); \ 2787 _argvec[5] = (unsigned long)( arg5 ); \ 2788 _argvec[6] = (unsigned long)( arg6 ); \ 2789 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #20] \n\t" \ 2790 "ldr r1, [%1, #24] \n\t" \ 2791 "push {r0, r1} \n\t" \ 2792 "ldr r0, [%1, #4] \n\t" \ 2793 "ldr r1, [%1, #8] \n\t" \ 2794 "ldr r2, [%1, #12] \n\t" \ 2795 "ldr r3, [%1, #16] \n\t" \ 2796 "ldr r4, [%1] \n\t" \ 
2797 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \ 2799 : "0"( &_argvec[0] ) \ 2800 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \ 2801 lval = (__typeof__( lval ))_res; \ 2804 #define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \ 2806 volatile OrigFn _orig = ( orig ); \ 2807 volatile unsigned long _argvec[8]; \ 2808 volatile unsigned long _res; \ 2809 _argvec[0] = (unsigned long)_orig.nraddr; \ 2810 _argvec[1] = (unsigned long)( arg1 ); \ 2811 _argvec[2] = (unsigned long)( arg2 ); \ 2812 _argvec[3] = (unsigned long)( arg3 ); \ 2813 _argvec[4] = (unsigned long)( arg4 ); \ 2814 _argvec[5] = (unsigned long)( arg5 ); \ 2815 _argvec[6] = (unsigned long)( arg6 ); \ 2816 _argvec[7] = (unsigned long)( arg7 ); \ 2817 __asm__ volatile( VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \ 2818 "ldr r0, [%1, #20] \n\t" \ 2819 "ldr r1, [%1, #24] \n\t" \ 2820 "ldr r2, [%1, #28] \n\t" \ 2821 "push {r0, r1, r2} \n\t" \ 2822 "ldr r0, [%1, #4] \n\t" \ 2823 "ldr r1, [%1, #8] \n\t" \ 2824 "ldr r2, [%1, #12] \n\t" \ 2825 "ldr r3, [%1, #16] \n\t" \ 2826 "ldr r4, [%1] \n\t" \ 2827 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \ 2829 : "0"( &_argvec[0] ) \ 2830 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \ 2831 lval = (__typeof__( lval ))_res; \ 2834 #define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \ 2836 volatile OrigFn _orig = ( orig ); \ 2837 volatile unsigned long _argvec[9]; \ 2838 volatile unsigned long _res; \ 2839 _argvec[0] = (unsigned long)_orig.nraddr; \ 2840 _argvec[1] = (unsigned long)( arg1 ); \ 2841 _argvec[2] = (unsigned long)( arg2 ); \ 2842 _argvec[3] = (unsigned long)( arg3 ); \ 2843 _argvec[4] = (unsigned long)( arg4 ); \ 2844 _argvec[5] = (unsigned long)( arg5 ); \ 2845 _argvec[6] = (unsigned long)( arg6 ); \ 2846 _argvec[7] = (unsigned long)( arg7 ); \ 2847 _argvec[8] = (unsigned long)( arg8 ); \ 2848 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, 
[%1, #20] \n\t" \ 2849 "ldr r1, [%1, #24] \n\t" \ 2850 "ldr r2, [%1, #28] \n\t" \ 2851 "ldr r3, [%1, #32] \n\t" \ 2852 "push {r0, r1, r2, r3} \n\t" \ 2853 "ldr r0, [%1, #4] \n\t" \ 2854 "ldr r1, [%1, #8] \n\t" \ 2855 "ldr r2, [%1, #12] \n\t" \ 2856 "ldr r3, [%1, #16] \n\t" \ 2857 "ldr r4, [%1] \n\t" \ 2858 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \ 2860 : "0"( &_argvec[0] ) \ 2861 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \ 2862 lval = (__typeof__( lval ))_res; \ 2865 #define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \ 2867 volatile OrigFn _orig = ( orig ); \ 2868 volatile unsigned long _argvec[10]; \ 2869 volatile unsigned long _res; \ 2870 _argvec[0] = (unsigned long)_orig.nraddr; \ 2871 _argvec[1] = (unsigned long)( arg1 ); \ 2872 _argvec[2] = (unsigned long)( arg2 ); \ 2873 _argvec[3] = (unsigned long)( arg3 ); \ 2874 _argvec[4] = (unsigned long)( arg4 ); \ 2875 _argvec[5] = (unsigned long)( arg5 ); \ 2876 _argvec[6] = (unsigned long)( arg6 ); \ 2877 _argvec[7] = (unsigned long)( arg7 ); \ 2878 _argvec[8] = (unsigned long)( arg8 ); \ 2879 _argvec[9] = (unsigned long)( arg9 ); \ 2880 __asm__ volatile( VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \ 2881 "ldr r0, [%1, #20] \n\t" \ 2882 "ldr r1, [%1, #24] \n\t" \ 2883 "ldr r2, [%1, #28] \n\t" \ 2884 "ldr r3, [%1, #32] \n\t" \ 2885 "ldr r4, [%1, #36] \n\t" \ 2886 "push {r0, r1, r2, r3, r4} \n\t" \ 2887 "ldr r0, [%1, #4] \n\t" \ 2888 "ldr r1, [%1, #8] \n\t" \ 2889 "ldr r2, [%1, #12] \n\t" \ 2890 "ldr r3, [%1, #16] \n\t" \ 2891 "ldr r4, [%1] \n\t" \ 2892 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \ 2894 : "0"( &_argvec[0] ) \ 2895 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \ 2896 lval = (__typeof__( lval ))_res; \ 2899 #define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \ 2901 volatile OrigFn _orig = ( orig ); \ 2902 volatile unsigned long _argvec[11]; \ 2903 
volatile unsigned long _res; \ 2904 _argvec[0] = (unsigned long)_orig.nraddr; \ 2905 _argvec[1] = (unsigned long)( arg1 ); \ 2906 _argvec[2] = (unsigned long)( arg2 ); \ 2907 _argvec[3] = (unsigned long)( arg3 ); \ 2908 _argvec[4] = (unsigned long)( arg4 ); \ 2909 _argvec[5] = (unsigned long)( arg5 ); \ 2910 _argvec[6] = (unsigned long)( arg6 ); \ 2911 _argvec[7] = (unsigned long)( arg7 ); \ 2912 _argvec[8] = (unsigned long)( arg8 ); \ 2913 _argvec[9] = (unsigned long)( arg9 ); \ 2914 _argvec[10] = (unsigned long)( arg10 ); \ 2915 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #40] \n\t" \ 2917 "ldr r0, [%1, #20] \n\t" \ 2918 "ldr r1, [%1, #24] \n\t" \ 2919 "ldr r2, [%1, #28] \n\t" \ 2920 "ldr r3, [%1, #32] \n\t" \ 2921 "ldr r4, [%1, #36] \n\t" \ 2922 "push {r0, r1, r2, r3, r4} \n\t" \ 2923 "ldr r0, [%1, #4] \n\t" \ 2924 "ldr r1, [%1, #8] \n\t" \ 2925 "ldr r2, [%1, #12] \n\t" \ 2926 "ldr r3, [%1, #16] \n\t" \ 2927 "ldr r4, [%1] \n\t" \ 2928 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \ 2930 : "0"( &_argvec[0] ) \ 2931 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \ 2932 lval = (__typeof__( lval ))_res; \ 2935 #define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \ 2937 volatile OrigFn _orig = ( orig ); \ 2938 volatile unsigned long _argvec[12]; \ 2939 volatile unsigned long _res; \ 2940 _argvec[0] = (unsigned long)_orig.nraddr; \ 2941 _argvec[1] = (unsigned long)( arg1 ); \ 2942 _argvec[2] = (unsigned long)( arg2 ); \ 2943 _argvec[3] = (unsigned long)( arg3 ); \ 2944 _argvec[4] = (unsigned long)( arg4 ); \ 2945 _argvec[5] = (unsigned long)( arg5 ); \ 2946 _argvec[6] = (unsigned long)( arg6 ); \ 2947 _argvec[7] = (unsigned long)( arg7 ); \ 2948 _argvec[8] = (unsigned long)( arg8 ); \ 2949 _argvec[9] = (unsigned long)( arg9 ); \ 2950 _argvec[10] = (unsigned long)( arg10 ); \ 2951 _argvec[11] = (unsigned long)( arg11 ); \ 2952 __asm__ volatile( VALGRIND_ALIGN_STACK "sub sp, sp, #4 
\n\t" \ 2953 "ldr r0, [%1, #40] \n\t" \ 2954 "ldr r1, [%1, #44] \n\t" \ 2955 "push {r0, r1} \n\t" \ 2956 "ldr r0, [%1, #20] \n\t" \ 2957 "ldr r1, [%1, #24] \n\t" \ 2958 "ldr r2, [%1, #28] \n\t" \ 2959 "ldr r3, [%1, #32] \n\t" \ 2960 "ldr r4, [%1, #36] \n\t" \ 2961 "push {r0, r1, r2, r3, r4} \n\t" \ 2962 "ldr r0, [%1, #4] \n\t" \ 2963 "ldr r1, [%1, #8] \n\t" \ 2964 "ldr r2, [%1, #12] \n\t" \ 2965 "ldr r3, [%1, #16] \n\t" \ 2966 "ldr r4, [%1] \n\t" \ 2967 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \ 2969 : "0"( &_argvec[0] ) \ 2970 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \ 2971 lval = (__typeof__( lval ))_res; \ 2974 #define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \ 2976 volatile OrigFn _orig = ( orig ); \ 2977 volatile unsigned long _argvec[13]; \ 2978 volatile unsigned long _res; \ 2979 _argvec[0] = (unsigned long)_orig.nraddr; \ 2980 _argvec[1] = (unsigned long)( arg1 ); \ 2981 _argvec[2] = (unsigned long)( arg2 ); \ 2982 _argvec[3] = (unsigned long)( arg3 ); \ 2983 _argvec[4] = (unsigned long)( arg4 ); \ 2984 _argvec[5] = (unsigned long)( arg5 ); \ 2985 _argvec[6] = (unsigned long)( arg6 ); \ 2986 _argvec[7] = (unsigned long)( arg7 ); \ 2987 _argvec[8] = (unsigned long)( arg8 ); \ 2988 _argvec[9] = (unsigned long)( arg9 ); \ 2989 _argvec[10] = (unsigned long)( arg10 ); \ 2990 _argvec[11] = (unsigned long)( arg11 ); \ 2991 _argvec[12] = (unsigned long)( arg12 ); \ 2992 __asm__ volatile( VALGRIND_ALIGN_STACK "ldr r0, [%1, #40] \n\t" \ 2993 "ldr r1, [%1, #44] \n\t" \ 2994 "ldr r2, [%1, #48] \n\t" \ 2995 "push {r0, r1, r2} \n\t" \ 2996 "ldr r0, [%1, #20] \n\t" \ 2997 "ldr r1, [%1, #24] \n\t" \ 2998 "ldr r2, [%1, #28] \n\t" \ 2999 "ldr r3, [%1, #32] \n\t" \ 3000 "ldr r4, [%1, #36] \n\t" \ 3001 "push {r0, r1, r2, r3, r4} \n\t" \ 3002 "ldr r0, [%1, #4] \n\t" \ 3003 "ldr r1, [%1, #8] \n\t" \ 3004 "ldr r2, [%1, #12] \n\t" \ 3005 "ldr r3, [%1, #16] \n\t" \ 3006 "ldr r4, 
[%1] \n\t" \ 3007 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \ 3009 : "0"( &_argvec[0] ) \ 3010 : "cc", "memory", __CALLER_SAVED_REGS, "r10" ); \ 3011 lval = (__typeof__( lval ))_res; \ 3018 #if defined( PLAT_s390x_linux ) 3024 #if defined( __GNUC__ ) && defined( __GCC_HAVE_DWARF2_CFI_ASM ) 3025 #define __FRAME_POINTER , "d"( __builtin_dwarf_cfa() ) 3026 #define VALGRIND_CFI_PROLOGUE \ 3027 ".cfi_remember_state\n\t" \ 3031 ".cfi_def_cfa r11, 0\n\t" 3032 #define VALGRIND_CFI_EPILOGUE \ 3034 ".cfi_restore_state\n\t" 3036 #define __FRAME_POINTER 3037 #define VALGRIND_CFI_PROLOGUE "lgr 1,%1\n\t" 3038 #define VALGRIND_CFI_EPILOGUE 3050 #define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7" 3061 #define CALL_FN_W_v( lval, orig ) \ 3063 volatile OrigFn _orig = ( orig ); \ 3064 volatile unsigned long _argvec[1]; \ 3065 volatile unsigned long _res; \ 3066 _argvec[0] = (unsigned long)_orig.nraddr; \ 3067 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \ 3069 VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \ 3070 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \ 3072 : "d"(&_argvec[0])__FRAME_POINTER \ 3073 : "cc", "memory", __CALLER_SAVED_REGS, "7" ); \ 3074 lval = (__typeof__( lval ))_res; \ 3078 #define CALL_FN_W_W( lval, orig, arg1 ) \ 3080 volatile OrigFn _orig = ( orig ); \ 3081 volatile unsigned long _argvec[2]; \ 3082 volatile unsigned long _res; \ 3083 _argvec[0] = (unsigned long)_orig.nraddr; \ 3084 _argvec[1] = (unsigned long)arg1; \ 3085 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \ 3087 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \ 3088 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \ 3090 : "a"(&_argvec[0])__FRAME_POINTER \ 3091 : "cc", "memory", __CALLER_SAVED_REGS, "7" ); \ 3092 lval = (__typeof__( lval ))_res; \ 3095 #define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \ 3097 volatile OrigFn _orig = ( orig ); \ 3098 volatile unsigned long _argvec[3]; \ 3099 
volatile unsigned long _res; \ 3100 _argvec[0] = (unsigned long)_orig.nraddr; \ 3101 _argvec[1] = (unsigned long)arg1; \ 3102 _argvec[2] = (unsigned long)arg2; \ 3103 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \ 3106 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \ 3107 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \ 3109 : "a"(&_argvec[0])__FRAME_POINTER \ 3110 : "cc", "memory", __CALLER_SAVED_REGS, "7" ); \ 3111 lval = (__typeof__( lval ))_res; \ 3114 #define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \ 3116 volatile OrigFn _orig = ( orig ); \ 3117 volatile unsigned long _argvec[4]; \ 3118 volatile unsigned long _res; \ 3119 _argvec[0] = (unsigned long)_orig.nraddr; \ 3120 _argvec[1] = (unsigned long)arg1; \ 3121 _argvec[2] = (unsigned long)arg2; \ 3122 _argvec[3] = (unsigned long)arg3; \ 3123 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \ 3127 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \ 3128 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \ 3130 : "a"(&_argvec[0])__FRAME_POINTER \ 3131 : "cc", "memory", __CALLER_SAVED_REGS, "7" ); \ 3132 lval = (__typeof__( lval ))_res; \ 3135 #define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \ 3137 volatile OrigFn _orig = ( orig ); \ 3138 volatile unsigned long _argvec[5]; \ 3139 volatile unsigned long _res; \ 3140 _argvec[0] = (unsigned long)_orig.nraddr; \ 3141 _argvec[1] = (unsigned long)arg1; \ 3142 _argvec[2] = (unsigned long)arg2; \ 3143 _argvec[3] = (unsigned long)arg3; \ 3144 _argvec[4] = (unsigned long)arg4; \ 3145 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \ 3150 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \ 3151 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \ 3153 : "a"(&_argvec[0])__FRAME_POINTER \ 3154 : "cc", "memory", __CALLER_SAVED_REGS, "7" ); \ 3155 lval = (__typeof__( lval ))_res; \ 3158 #define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \ 3160 volatile OrigFn _orig = ( orig ); \ 3161 volatile unsigned long 
_argvec[6]; \ 3162 volatile unsigned long _res; \ 3163 _argvec[0] = (unsigned long)_orig.nraddr; \ 3164 _argvec[1] = (unsigned long)arg1; \ 3165 _argvec[2] = (unsigned long)arg2; \ 3166 _argvec[3] = (unsigned long)arg3; \ 3167 _argvec[4] = (unsigned long)arg4; \ 3168 _argvec[5] = (unsigned long)arg5; \ 3169 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \ 3175 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \ 3176 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \ 3178 : "a"(&_argvec[0])__FRAME_POINTER \ 3179 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \ 3180 lval = (__typeof__( lval ))_res; \ 3183 #define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \ 3185 volatile OrigFn _orig = ( orig ); \ 3186 volatile unsigned long _argvec[7]; \ 3187 volatile unsigned long _res; \ 3188 _argvec[0] = (unsigned long)_orig.nraddr; \ 3189 _argvec[1] = (unsigned long)arg1; \ 3190 _argvec[2] = (unsigned long)arg2; \ 3191 _argvec[3] = (unsigned long)arg3; \ 3192 _argvec[4] = (unsigned long)arg4; \ 3193 _argvec[5] = (unsigned long)arg5; \ 3194 _argvec[6] = (unsigned long)arg6; \ 3195 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-168\n\t" \ 3201 "mvc 160(8,15), 48(1)\n\t" \ 3202 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \ 3203 "aghi 15,168\n\t" VALGRIND_CFI_EPILOGUE \ 3205 : "a"(&_argvec[0])__FRAME_POINTER \ 3206 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \ 3207 lval = (__typeof__( lval ))_res; \ 3210 #define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \ 3212 volatile OrigFn _orig = ( orig ); \ 3213 volatile unsigned long _argvec[8]; \ 3214 volatile unsigned long _res; \ 3215 _argvec[0] = (unsigned long)_orig.nraddr; \ 3216 _argvec[1] = (unsigned long)arg1; \ 3217 _argvec[2] = (unsigned long)arg2; \ 3218 _argvec[3] = (unsigned long)arg3; \ 3219 _argvec[4] = (unsigned long)arg4; \ 3220 _argvec[5] = (unsigned long)arg5; \ 3221 _argvec[6] = (unsigned long)arg6; \ 3222 _argvec[7] = (unsigned 
long)arg7; \ 3223 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-176\n\t" \ 3229 "mvc 160(8,15), 48(1)\n\t" \ 3230 "mvc 168(8,15), 56(1)\n\t" \ 3231 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \ 3232 "aghi 15,176\n\t" VALGRIND_CFI_EPILOGUE \ 3234 : "a"(&_argvec[0])__FRAME_POINTER \ 3235 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \ 3236 lval = (__typeof__( lval ))_res; \ 3239 #define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \ 3241 volatile OrigFn _orig = ( orig ); \ 3242 volatile unsigned long _argvec[9]; \ 3243 volatile unsigned long _res; \ 3244 _argvec[0] = (unsigned long)_orig.nraddr; \ 3245 _argvec[1] = (unsigned long)arg1; \ 3246 _argvec[2] = (unsigned long)arg2; \ 3247 _argvec[3] = (unsigned long)arg3; \ 3248 _argvec[4] = (unsigned long)arg4; \ 3249 _argvec[5] = (unsigned long)arg5; \ 3250 _argvec[6] = (unsigned long)arg6; \ 3251 _argvec[7] = (unsigned long)arg7; \ 3252 _argvec[8] = (unsigned long)arg8; \ 3253 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-184\n\t" \ 3259 "mvc 160(8,15), 48(1)\n\t" \ 3260 "mvc 168(8,15), 56(1)\n\t" \ 3261 "mvc 176(8,15), 64(1)\n\t" \ 3262 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \ 3263 "aghi 15,184\n\t" VALGRIND_CFI_EPILOGUE \ 3265 : "a"(&_argvec[0])__FRAME_POINTER \ 3266 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \ 3267 lval = (__typeof__( lval ))_res; \ 3270 #define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \ 3272 volatile OrigFn _orig = ( orig ); \ 3273 volatile unsigned long _argvec[10]; \ 3274 volatile unsigned long _res; \ 3275 _argvec[0] = (unsigned long)_orig.nraddr; \ 3276 _argvec[1] = (unsigned long)arg1; \ 3277 _argvec[2] = (unsigned long)arg2; \ 3278 _argvec[3] = (unsigned long)arg3; \ 3279 _argvec[4] = (unsigned long)arg4; \ 3280 _argvec[5] = (unsigned long)arg5; \ 3281 _argvec[6] = (unsigned long)arg6; \ 3282 _argvec[7] = (unsigned long)arg7; \ 3283 _argvec[8] = (unsigned long)arg8; \ 
3284 _argvec[9] = (unsigned long)arg9; \ 3285 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-192\n\t" \ 3291 "mvc 160(8,15), 48(1)\n\t" \ 3292 "mvc 168(8,15), 56(1)\n\t" \ 3293 "mvc 176(8,15), 64(1)\n\t" \ 3294 "mvc 184(8,15), 72(1)\n\t" \ 3295 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \ 3296 "aghi 15,192\n\t" VALGRIND_CFI_EPILOGUE \ 3298 : "a"(&_argvec[0])__FRAME_POINTER \ 3299 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \ 3300 lval = (__typeof__( lval ))_res; \ 3303 #define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \ 3305 volatile OrigFn _orig = ( orig ); \ 3306 volatile unsigned long _argvec[11]; \ 3307 volatile unsigned long _res; \ 3308 _argvec[0] = (unsigned long)_orig.nraddr; \ 3309 _argvec[1] = (unsigned long)arg1; \ 3310 _argvec[2] = (unsigned long)arg2; \ 3311 _argvec[3] = (unsigned long)arg3; \ 3312 _argvec[4] = (unsigned long)arg4; \ 3313 _argvec[5] = (unsigned long)arg5; \ 3314 _argvec[6] = (unsigned long)arg6; \ 3315 _argvec[7] = (unsigned long)arg7; \ 3316 _argvec[8] = (unsigned long)arg8; \ 3317 _argvec[9] = (unsigned long)arg9; \ 3318 _argvec[10] = (unsigned long)arg10; \ 3319 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-200\n\t" \ 3325 "mvc 160(8,15), 48(1)\n\t" \ 3326 "mvc 168(8,15), 56(1)\n\t" \ 3327 "mvc 176(8,15), 64(1)\n\t" \ 3328 "mvc 184(8,15), 72(1)\n\t" \ 3329 "mvc 192(8,15), 80(1)\n\t" \ 3330 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \ 3331 "aghi 15,200\n\t" VALGRIND_CFI_EPILOGUE \ 3333 : "a"(&_argvec[0])__FRAME_POINTER \ 3334 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \ 3335 lval = (__typeof__( lval ))_res; \ 3338 #define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \ 3340 volatile OrigFn _orig = ( orig ); \ 3341 volatile unsigned long _argvec[12]; \ 3342 volatile unsigned long _res; \ 3343 _argvec[0] = (unsigned long)_orig.nraddr; \ 3344 _argvec[1] = (unsigned long)arg1; \ 3345 
_argvec[2] = (unsigned long)arg2; \ 3346 _argvec[3] = (unsigned long)arg3; \ 3347 _argvec[4] = (unsigned long)arg4; \ 3348 _argvec[5] = (unsigned long)arg5; \ 3349 _argvec[6] = (unsigned long)arg6; \ 3350 _argvec[7] = (unsigned long)arg7; \ 3351 _argvec[8] = (unsigned long)arg8; \ 3352 _argvec[9] = (unsigned long)arg9; \ 3353 _argvec[10] = (unsigned long)arg10; \ 3354 _argvec[11] = (unsigned long)arg11; \ 3355 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-208\n\t" \ 3361 "mvc 160(8,15), 48(1)\n\t" \ 3362 "mvc 168(8,15), 56(1)\n\t" \ 3363 "mvc 176(8,15), 64(1)\n\t" \ 3364 "mvc 184(8,15), 72(1)\n\t" \ 3365 "mvc 192(8,15), 80(1)\n\t" \ 3366 "mvc 200(8,15), 88(1)\n\t" \ 3367 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \ 3368 "aghi 15,208\n\t" VALGRIND_CFI_EPILOGUE \ 3370 : "a"(&_argvec[0])__FRAME_POINTER \ 3371 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \ 3372 lval = (__typeof__( lval ))_res; \ 3375 #define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \ 3377 volatile OrigFn _orig = ( orig ); \ 3378 volatile unsigned long _argvec[13]; \ 3379 volatile unsigned long _res; \ 3380 _argvec[0] = (unsigned long)_orig.nraddr; \ 3381 _argvec[1] = (unsigned long)arg1; \ 3382 _argvec[2] = (unsigned long)arg2; \ 3383 _argvec[3] = (unsigned long)arg3; \ 3384 _argvec[4] = (unsigned long)arg4; \ 3385 _argvec[5] = (unsigned long)arg5; \ 3386 _argvec[6] = (unsigned long)arg6; \ 3387 _argvec[7] = (unsigned long)arg7; \ 3388 _argvec[8] = (unsigned long)arg8; \ 3389 _argvec[9] = (unsigned long)arg9; \ 3390 _argvec[10] = (unsigned long)arg10; \ 3391 _argvec[11] = (unsigned long)arg11; \ 3392 _argvec[12] = (unsigned long)arg12; \ 3393 __asm__ volatile( VALGRIND_CFI_PROLOGUE "aghi 15,-216\n\t" \ 3399 "mvc 160(8,15), 48(1)\n\t" \ 3400 "mvc 168(8,15), 56(1)\n\t" \ 3401 "mvc 176(8,15), 64(1)\n\t" \ 3402 "mvc 184(8,15), 72(1)\n\t" \ 3403 "mvc 192(8,15), 80(1)\n\t" \ 3404 "mvc 200(8,15), 88(1)\n\t" \ 3405 "mvc 
208(8,15), 96(1)\n\t" \ 3406 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \ 3407 "aghi 15,216\n\t" VALGRIND_CFI_EPILOGUE \ 3409 : "a"(&_argvec[0])__FRAME_POINTER \ 3410 : "cc", "memory", __CALLER_SAVED_REGS, "6", "7" ); \ 3411 lval = (__typeof__( lval ))_res; \ 3418 #if defined( PLAT_mips32_linux ) 3421 #define __CALLER_SAVED_REGS \ 3422 "$2", "$3", "$4", "$5", "$6", "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", "$25", "$31" 3427 #define CALL_FN_W_v( lval, orig ) \ 3429 volatile OrigFn _orig = ( orig ); \ 3430 volatile unsigned long _argvec[1]; \ 3431 volatile unsigned long _res; \ 3432 _argvec[0] = (unsigned long)_orig.nraddr; \ 3433 __asm__ volatile( "subu $29, $29, 8 \n\t" \ 3434 "sw $28, 0($29) \n\t" \ 3435 "sw $31, 4($29) \n\t" \ 3436 "subu $29, $29, 16 \n\t" \ 3437 "lw $25, 0(%1) \n\t" \ 3438 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16\n\t" \ 3439 "lw $28, 0($29) \n\t" \ 3440 "lw $31, 4($29) \n\t" \ 3441 "addu $29, $29, 8 \n\t" \ 3444 : "0"( &_argvec[0] ) \ 3445 : "memory", __CALLER_SAVED_REGS ); \ 3446 lval = (__typeof__( lval ))_res; \ 3449 #define CALL_FN_W_W( lval, orig, arg1 ) \ 3451 volatile OrigFn _orig = ( orig ); \ 3452 volatile unsigned long _argvec[2]; \ 3453 volatile unsigned long _res; \ 3454 _argvec[0] = (unsigned long)_orig.nraddr; \ 3455 _argvec[1] = (unsigned long)( arg1 ); \ 3456 __asm__ volatile( "subu $29, $29, 8 \n\t" \ 3457 "sw $28, 0($29) \n\t" \ 3458 "sw $31, 4($29) \n\t" \ 3459 "subu $29, $29, 16 \n\t" \ 3460 "lw $4, 4(%1) \n\t" \ 3461 "lw $25, 0(%1) \n\t" \ 3462 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \ 3463 "lw $28, 0($29) \n\t" \ 3464 "lw $31, 4($29) \n\t" \ 3465 "addu $29, $29, 8 \n\t" \ 3468 : "0"( &_argvec[0] ) \ 3469 : "memory", __CALLER_SAVED_REGS ); \ 3470 lval = (__typeof__( lval ))_res; \ 3473 #define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \ 3475 volatile OrigFn _orig = ( orig ); \ 3476 volatile unsigned long _argvec[3]; \ 3477 volatile unsigned long _res; \ 3478 _argvec[0] = 
(unsigned long)_orig.nraddr; \ 3479 _argvec[1] = (unsigned long)( arg1 ); \ 3480 _argvec[2] = (unsigned long)( arg2 ); \ 3481 __asm__ volatile( "subu $29, $29, 8 \n\t" \ 3482 "sw $28, 0($29) \n\t" \ 3483 "sw $31, 4($29) \n\t" \ 3484 "subu $29, $29, 16 \n\t" \ 3485 "lw $4, 4(%1) \n\t" \ 3486 "lw $5, 8(%1) \n\t" \ 3487 "lw $25, 0(%1) \n\t" \ 3488 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \ 3489 "lw $28, 0($29) \n\t" \ 3490 "lw $31, 4($29) \n\t" \ 3491 "addu $29, $29, 8 \n\t" \ 3494 : "0"( &_argvec[0] ) \ 3495 : "memory", __CALLER_SAVED_REGS ); \ 3496 lval = (__typeof__( lval ))_res; \ 3499 #define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \ 3501 volatile OrigFn _orig = ( orig ); \ 3502 volatile unsigned long _argvec[4]; \ 3503 volatile unsigned long _res; \ 3504 _argvec[0] = (unsigned long)_orig.nraddr; \ 3505 _argvec[1] = (unsigned long)( arg1 ); \ 3506 _argvec[2] = (unsigned long)( arg2 ); \ 3507 _argvec[3] = (unsigned long)( arg3 ); \ 3508 __asm__ volatile( "subu $29, $29, 8 \n\t" \ 3509 "sw $28, 0($29) \n\t" \ 3510 "sw $31, 4($29) \n\t" \ 3511 "subu $29, $29, 16 \n\t" \ 3512 "lw $4, 4(%1) \n\t" \ 3513 "lw $5, 8(%1) \n\t" \ 3514 "lw $6, 12(%1) \n\t" \ 3515 "lw $25, 0(%1) \n\t" \ 3516 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \ 3517 "lw $28, 0($29) \n\t" \ 3518 "lw $31, 4($29) \n\t" \ 3519 "addu $29, $29, 8 \n\t" \ 3522 : "0"( &_argvec[0] ) \ 3523 : "memory", __CALLER_SAVED_REGS ); \ 3524 lval = (__typeof__( lval ))_res; \ 3527 #define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \ 3529 volatile OrigFn _orig = ( orig ); \ 3530 volatile unsigned long _argvec[5]; \ 3531 volatile unsigned long _res; \ 3532 _argvec[0] = (unsigned long)_orig.nraddr; \ 3533 _argvec[1] = (unsigned long)( arg1 ); \ 3534 _argvec[2] = (unsigned long)( arg2 ); \ 3535 _argvec[3] = (unsigned long)( arg3 ); \ 3536 _argvec[4] = (unsigned long)( arg4 ); \ 3537 __asm__ volatile( "subu $29, $29, 8 \n\t" \ 3538 "sw $28, 0($29) \n\t" \ 3539 "sw $31, 4($29) \n\t" \ 3540 
"subu $29, $29, 16 \n\t" \ 3541 "lw $4, 4(%1) \n\t" \ 3542 "lw $5, 8(%1) \n\t" \ 3543 "lw $6, 12(%1) \n\t" \ 3544 "lw $7, 16(%1) \n\t" \ 3545 "lw $25, 0(%1) \n\t" \ 3546 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \ 3547 "lw $28, 0($29) \n\t" \ 3548 "lw $31, 4($29) \n\t" \ 3549 "addu $29, $29, 8 \n\t" \ 3552 : "0"( &_argvec[0] ) \ 3553 : "memory", __CALLER_SAVED_REGS ); \ 3554 lval = (__typeof__( lval ))_res; \ 3557 #define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \ 3559 volatile OrigFn _orig = ( orig ); \ 3560 volatile unsigned long _argvec[6]; \ 3561 volatile unsigned long _res; \ 3562 _argvec[0] = (unsigned long)_orig.nraddr; \ 3563 _argvec[1] = (unsigned long)( arg1 ); \ 3564 _argvec[2] = (unsigned long)( arg2 ); \ 3565 _argvec[3] = (unsigned long)( arg3 ); \ 3566 _argvec[4] = (unsigned long)( arg4 ); \ 3567 _argvec[5] = (unsigned long)( arg5 ); \ 3568 __asm__ volatile( "subu $29, $29, 8 \n\t" \ 3569 "sw $28, 0($29) \n\t" \ 3570 "sw $31, 4($29) \n\t" \ 3571 "lw $4, 20(%1) \n\t" \ 3572 "subu $29, $29, 24\n\t" \ 3573 "sw $4, 16($29) \n\t" \ 3574 "lw $4, 4(%1) \n\t" \ 3575 "lw $5, 8(%1) \n\t" \ 3576 "lw $6, 12(%1) \n\t" \ 3577 "lw $7, 16(%1) \n\t" \ 3578 "lw $25, 0(%1) \n\t" \ 3579 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 24 \n\t" \ 3580 "lw $28, 0($29) \n\t" \ 3581 "lw $31, 4($29) \n\t" \ 3582 "addu $29, $29, 8 \n\t" \ 3585 : "0"( &_argvec[0] ) \ 3586 : "memory", __CALLER_SAVED_REGS ); \ 3587 lval = (__typeof__( lval ))_res; \ 3589 #define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \ 3591 volatile OrigFn _orig = ( orig ); \ 3592 volatile unsigned long _argvec[7]; \ 3593 volatile unsigned long _res; \ 3594 _argvec[0] = (unsigned long)_orig.nraddr; \ 3595 _argvec[1] = (unsigned long)( arg1 ); \ 3596 _argvec[2] = (unsigned long)( arg2 ); \ 3597 _argvec[3] = (unsigned long)( arg3 ); \ 3598 _argvec[4] = (unsigned long)( arg4 ); \ 3599 _argvec[5] = (unsigned long)( arg5 ); \ 3600 _argvec[6] = (unsigned long)( arg6 ); \ 3601 
__asm__ volatile( "subu $29, $29, 8 \n\t" \ 3602 "sw $28, 0($29) \n\t" \ 3603 "sw $31, 4($29) \n\t" \ 3604 "lw $4, 20(%1) \n\t" \ 3605 "subu $29, $29, 32\n\t" \ 3606 "sw $4, 16($29) \n\t" \ 3607 "lw $4, 24(%1) \n\t" \ 3609 "sw $4, 20($29) \n\t" \ 3610 "lw $4, 4(%1) \n\t" \ 3611 "lw $5, 8(%1) \n\t" \ 3612 "lw $6, 12(%1) \n\t" \ 3613 "lw $7, 16(%1) \n\t" \ 3614 "lw $25, 0(%1) \n\t" \ 3615 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 32 \n\t" \ 3616 "lw $28, 0($29) \n\t" \ 3617 "lw $31, 4($29) \n\t" \ 3618 "addu $29, $29, 8 \n\t" \ 3621 : "0"( &_argvec[0] ) \ 3622 : "memory", __CALLER_SAVED_REGS ); \ 3623 lval = (__typeof__( lval ))_res; \ 3626 #define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \ 3628 volatile OrigFn _orig = ( orig ); \ 3629 volatile unsigned long _argvec[8]; \ 3630 volatile unsigned long _res; \ 3631 _argvec[0] = (unsigned long)_orig.nraddr; \ 3632 _argvec[1] = (unsigned long)( arg1 ); \ 3633 _argvec[2] = (unsigned long)( arg2 ); \ 3634 _argvec[3] = (unsigned long)( arg3 ); \ 3635 _argvec[4] = (unsigned long)( arg4 ); \ 3636 _argvec[5] = (unsigned long)( arg5 ); \ 3637 _argvec[6] = (unsigned long)( arg6 ); \ 3638 _argvec[7] = (unsigned long)( arg7 ); \ 3639 __asm__ volatile( "subu $29, $29, 8 \n\t" \ 3640 "sw $28, 0($29) \n\t" \ 3641 "sw $31, 4($29) \n\t" \ 3642 "lw $4, 20(%1) \n\t" \ 3643 "subu $29, $29, 32\n\t" \ 3644 "sw $4, 16($29) \n\t" \ 3645 "lw $4, 24(%1) \n\t" \ 3646 "sw $4, 20($29) \n\t" \ 3647 "lw $4, 28(%1) \n\t" \ 3648 "sw $4, 24($29) \n\t" \ 3649 "lw $4, 4(%1) \n\t" \ 3650 "lw $5, 8(%1) \n\t" \ 3651 "lw $6, 12(%1) \n\t" \ 3652 "lw $7, 16(%1) \n\t" \ 3653 "lw $25, 0(%1) \n\t" \ 3654 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 32 \n\t" \ 3655 "lw $28, 0($29) \n\t" \ 3656 "lw $31, 4($29) \n\t" \ 3657 "addu $29, $29, 8 \n\t" \ 3660 : "0"( &_argvec[0] ) \ 3661 : "memory", __CALLER_SAVED_REGS ); \ 3662 lval = (__typeof__( lval ))_res; \ 3665 #define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) 
\ 3667 volatile OrigFn _orig = ( orig ); \ 3668 volatile unsigned long _argvec[9]; \ 3669 volatile unsigned long _res; \ 3670 _argvec[0] = (unsigned long)_orig.nraddr; \ 3671 _argvec[1] = (unsigned long)( arg1 ); \ 3672 _argvec[2] = (unsigned long)( arg2 ); \ 3673 _argvec[3] = (unsigned long)( arg3 ); \ 3674 _argvec[4] = (unsigned long)( arg4 ); \ 3675 _argvec[5] = (unsigned long)( arg5 ); \ 3676 _argvec[6] = (unsigned long)( arg6 ); \ 3677 _argvec[7] = (unsigned long)( arg7 ); \ 3678 _argvec[8] = (unsigned long)( arg8 ); \ 3679 __asm__ volatile( "subu $29, $29, 8 \n\t" \ 3680 "sw $28, 0($29) \n\t" \ 3681 "sw $31, 4($29) \n\t" \ 3682 "lw $4, 20(%1) \n\t" \ 3683 "subu $29, $29, 40\n\t" \ 3684 "sw $4, 16($29) \n\t" \ 3685 "lw $4, 24(%1) \n\t" \ 3686 "sw $4, 20($29) \n\t" \ 3687 "lw $4, 28(%1) \n\t" \ 3688 "sw $4, 24($29) \n\t" \ 3689 "lw $4, 32(%1) \n\t" \ 3690 "sw $4, 28($29) \n\t" \ 3691 "lw $4, 4(%1) \n\t" \ 3692 "lw $5, 8(%1) \n\t" \ 3693 "lw $6, 12(%1) \n\t" \ 3694 "lw $7, 16(%1) \n\t" \ 3695 "lw $25, 0(%1) \n\t" \ 3696 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 40 \n\t" \ 3697 "lw $28, 0($29) \n\t" \ 3698 "lw $31, 4($29) \n\t" \ 3699 "addu $29, $29, 8 \n\t" \ 3702 : "0"( &_argvec[0] ) \ 3703 : "memory", __CALLER_SAVED_REGS ); \ 3704 lval = (__typeof__( lval ))_res; \ 3707 #define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \ 3709 volatile OrigFn _orig = ( orig ); \ 3710 volatile unsigned long _argvec[10]; \ 3711 volatile unsigned long _res; \ 3712 _argvec[0] = (unsigned long)_orig.nraddr; \ 3713 _argvec[1] = (unsigned long)( arg1 ); \ 3714 _argvec[2] = (unsigned long)( arg2 ); \ 3715 _argvec[3] = (unsigned long)( arg3 ); \ 3716 _argvec[4] = (unsigned long)( arg4 ); \ 3717 _argvec[5] = (unsigned long)( arg5 ); \ 3718 _argvec[6] = (unsigned long)( arg6 ); \ 3719 _argvec[7] = (unsigned long)( arg7 ); \ 3720 _argvec[8] = (unsigned long)( arg8 ); \ 3721 _argvec[9] = (unsigned long)( arg9 ); \ 3722 __asm__ volatile( "subu $29, $29, 
8 \n\t" \ 3723 "sw $28, 0($29) \n\t" \ 3724 "sw $31, 4($29) \n\t" \ 3725 "lw $4, 20(%1) \n\t" \ 3726 "subu $29, $29, 40\n\t" \ 3727 "sw $4, 16($29) \n\t" \ 3728 "lw $4, 24(%1) \n\t" \ 3729 "sw $4, 20($29) \n\t" \ 3730 "lw $4, 28(%1) \n\t" \ 3731 "sw $4, 24($29) \n\t" \ 3732 "lw $4, 32(%1) \n\t" \ 3733 "sw $4, 28($29) \n\t" \ 3734 "lw $4, 36(%1) \n\t" \ 3735 "sw $4, 32($29) \n\t" \ 3736 "lw $4, 4(%1) \n\t" \ 3737 "lw $5, 8(%1) \n\t" \ 3738 "lw $6, 12(%1) \n\t" \ 3739 "lw $7, 16(%1) \n\t" \ 3740 "lw $25, 0(%1) \n\t" \ 3741 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 40 \n\t" \ 3742 "lw $28, 0($29) \n\t" \ 3743 "lw $31, 4($29) \n\t" \ 3744 "addu $29, $29, 8 \n\t" \ 3747 : "0"( &_argvec[0] ) \ 3748 : "memory", __CALLER_SAVED_REGS ); \ 3749 lval = (__typeof__( lval ))_res; \ 3752 #define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \ 3754 volatile OrigFn _orig = ( orig ); \ 3755 volatile unsigned long _argvec[11]; \ 3756 volatile unsigned long _res; \ 3757 _argvec[0] = (unsigned long)_orig.nraddr; \ 3758 _argvec[1] = (unsigned long)( arg1 ); \ 3759 _argvec[2] = (unsigned long)( arg2 ); \ 3760 _argvec[3] = (unsigned long)( arg3 ); \ 3761 _argvec[4] = (unsigned long)( arg4 ); \ 3762 _argvec[5] = (unsigned long)( arg5 ); \ 3763 _argvec[6] = (unsigned long)( arg6 ); \ 3764 _argvec[7] = (unsigned long)( arg7 ); \ 3765 _argvec[8] = (unsigned long)( arg8 ); \ 3766 _argvec[9] = (unsigned long)( arg9 ); \ 3767 _argvec[10] = (unsigned long)( arg10 ); \ 3768 __asm__ volatile( "subu $29, $29, 8 \n\t" \ 3769 "sw $28, 0($29) \n\t" \ 3770 "sw $31, 4($29) \n\t" \ 3771 "lw $4, 20(%1) \n\t" \ 3772 "subu $29, $29, 48\n\t" \ 3773 "sw $4, 16($29) \n\t" \ 3774 "lw $4, 24(%1) \n\t" \ 3775 "sw $4, 20($29) \n\t" \ 3776 "lw $4, 28(%1) \n\t" \ 3777 "sw $4, 24($29) \n\t" \ 3778 "lw $4, 32(%1) \n\t" \ 3779 "sw $4, 28($29) \n\t" \ 3780 "lw $4, 36(%1) \n\t" \ 3781 "sw $4, 32($29) \n\t" \ 3782 "lw $4, 40(%1) \n\t" \ 3783 "sw $4, 36($29) \n\t" \ 3784 "lw $4, 
4(%1) \n\t" \ 3785 "lw $5, 8(%1) \n\t" \ 3786 "lw $6, 12(%1) \n\t" \ 3787 "lw $7, 16(%1) \n\t" \ 3788 "lw $25, 0(%1) \n\t" \ 3789 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 48 \n\t" \ 3790 "lw $28, 0($29) \n\t" \ 3791 "lw $31, 4($29) \n\t" \ 3792 "addu $29, $29, 8 \n\t" \ 3795 : "0"( &_argvec[0] ) \ 3796 : "memory", __CALLER_SAVED_REGS ); \ 3797 lval = (__typeof__( lval ))_res; \ 3800 #define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \ 3802 volatile OrigFn _orig = ( orig ); \ 3803 volatile unsigned long _argvec[12]; \ 3804 volatile unsigned long _res; \ 3805 _argvec[0] = (unsigned long)_orig.nraddr; \ 3806 _argvec[1] = (unsigned long)( arg1 ); \ 3807 _argvec[2] = (unsigned long)( arg2 ); \ 3808 _argvec[3] = (unsigned long)( arg3 ); \ 3809 _argvec[4] = (unsigned long)( arg4 ); \ 3810 _argvec[5] = (unsigned long)( arg5 ); \ 3811 _argvec[6] = (unsigned long)( arg6 ); \ 3812 _argvec[7] = (unsigned long)( arg7 ); \ 3813 _argvec[8] = (unsigned long)( arg8 ); \ 3814 _argvec[9] = (unsigned long)( arg9 ); \ 3815 _argvec[10] = (unsigned long)( arg10 ); \ 3816 _argvec[11] = (unsigned long)( arg11 ); \ 3817 __asm__ volatile( "subu $29, $29, 8 \n\t" \ 3818 "sw $28, 0($29) \n\t" \ 3819 "sw $31, 4($29) \n\t" \ 3820 "lw $4, 20(%1) \n\t" \ 3821 "subu $29, $29, 48\n\t" \ 3822 "sw $4, 16($29) \n\t" \ 3823 "lw $4, 24(%1) \n\t" \ 3824 "sw $4, 20($29) \n\t" \ 3825 "lw $4, 28(%1) \n\t" \ 3826 "sw $4, 24($29) \n\t" \ 3827 "lw $4, 32(%1) \n\t" \ 3828 "sw $4, 28($29) \n\t" \ 3829 "lw $4, 36(%1) \n\t" \ 3830 "sw $4, 32($29) \n\t" \ 3831 "lw $4, 40(%1) \n\t" \ 3832 "sw $4, 36($29) \n\t" \ 3833 "lw $4, 44(%1) \n\t" \ 3834 "sw $4, 40($29) \n\t" \ 3835 "lw $4, 4(%1) \n\t" \ 3836 "lw $5, 8(%1) \n\t" \ 3837 "lw $6, 12(%1) \n\t" \ 3838 "lw $7, 16(%1) \n\t" \ 3839 "lw $25, 0(%1) \n\t" \ 3840 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 48 \n\t" \ 3841 "lw $28, 0($29) \n\t" \ 3842 "lw $31, 4($29) \n\t" \ 3843 "addu $29, $29, 8 \n\t" \ 3846 : "0"( 
&_argvec[0] ) \ 3847 : "memory", __CALLER_SAVED_REGS ); \ 3848 lval = (__typeof__( lval ))_res; \ 3851 #define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \ 3853 volatile OrigFn _orig = ( orig ); \ 3854 volatile unsigned long _argvec[13]; \ 3855 volatile unsigned long _res; \ 3856 _argvec[0] = (unsigned long)_orig.nraddr; \ 3857 _argvec[1] = (unsigned long)( arg1 ); \ 3858 _argvec[2] = (unsigned long)( arg2 ); \ 3859 _argvec[3] = (unsigned long)( arg3 ); \ 3860 _argvec[4] = (unsigned long)( arg4 ); \ 3861 _argvec[5] = (unsigned long)( arg5 ); \ 3862 _argvec[6] = (unsigned long)( arg6 ); \ 3863 _argvec[7] = (unsigned long)( arg7 ); \ 3864 _argvec[8] = (unsigned long)( arg8 ); \ 3865 _argvec[9] = (unsigned long)( arg9 ); \ 3866 _argvec[10] = (unsigned long)( arg10 ); \ 3867 _argvec[11] = (unsigned long)( arg11 ); \ 3868 _argvec[12] = (unsigned long)( arg12 ); \ 3869 __asm__ volatile( "subu $29, $29, 8 \n\t" \ 3870 "sw $28, 0($29) \n\t" \ 3871 "sw $31, 4($29) \n\t" \ 3872 "lw $4, 20(%1) \n\t" \ 3873 "subu $29, $29, 56\n\t" \ 3874 "sw $4, 16($29) \n\t" \ 3875 "lw $4, 24(%1) \n\t" \ 3876 "sw $4, 20($29) \n\t" \ 3877 "lw $4, 28(%1) \n\t" \ 3878 "sw $4, 24($29) \n\t" \ 3879 "lw $4, 32(%1) \n\t" \ 3880 "sw $4, 28($29) \n\t" \ 3881 "lw $4, 36(%1) \n\t" \ 3882 "sw $4, 32($29) \n\t" \ 3883 "lw $4, 40(%1) \n\t" \ 3884 "sw $4, 36($29) \n\t" \ 3885 "lw $4, 44(%1) \n\t" \ 3886 "sw $4, 40($29) \n\t" \ 3887 "lw $4, 48(%1) \n\t" \ 3888 "sw $4, 44($29) \n\t" \ 3889 "lw $4, 4(%1) \n\t" \ 3890 "lw $5, 8(%1) \n\t" \ 3891 "lw $6, 12(%1) \n\t" \ 3892 "lw $7, 16(%1) \n\t" \ 3893 "lw $25, 0(%1) \n\t" \ 3894 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 56 \n\t" \ 3895 "lw $28, 0($29) \n\t" \ 3896 "lw $31, 4($29) \n\t" \ 3897 "addu $29, $29, 8 \n\t" \ 3900 : "r"( &_argvec[0] ) \ 3901 : "memory", __CALLER_SAVED_REGS ); \ 3902 lval = (__typeof__( lval ))_res; \ 3909 #if defined( PLAT_mips64_linux ) 3912 #define __CALLER_SAVED_REGS \ 3913 
"$2", "$3", "$4", "$5", "$6", "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", "$25", "$31" 3918 #define CALL_FN_W_v( lval, orig ) \ 3920 volatile OrigFn _orig = ( orig ); \ 3921 volatile unsigned long _argvec[1]; \ 3922 volatile unsigned long _res; \ 3923 _argvec[0] = (unsigned long)_orig.nraddr; \ 3924 __asm__ volatile( "ld $25, 0(%1)\n\t" \ 3925 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \ 3927 : "0"( &_argvec[0] ) \ 3928 : "memory", __CALLER_SAVED_REGS ); \ 3929 lval = (__typeof__( lval ))_res; \ 3932 #define CALL_FN_W_W( lval, orig, arg1 ) \ 3934 volatile OrigFn _orig = ( orig ); \ 3935 volatile unsigned long _argvec[2]; \ 3936 volatile unsigned long _res; \ 3937 _argvec[0] = (unsigned long)_orig.nraddr; \ 3938 _argvec[1] = (unsigned long)( arg1 ); \ 3939 __asm__ volatile( "ld $4, 8(%1)\n\t" \ 3940 "ld $25, 0(%1)\n\t" \ 3941 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \ 3943 : "r"( &_argvec[0] ) \ 3944 : "memory", __CALLER_SAVED_REGS ); \ 3945 lval = (__typeof__( lval ))_res; \ 3948 #define CALL_FN_W_WW( lval, orig, arg1, arg2 ) \ 3950 volatile OrigFn _orig = ( orig ); \ 3951 volatile unsigned long _argvec[3]; \ 3952 volatile unsigned long _res; \ 3953 _argvec[0] = (unsigned long)_orig.nraddr; \ 3954 _argvec[1] = (unsigned long)( arg1 ); \ 3955 _argvec[2] = (unsigned long)( arg2 ); \ 3956 __asm__ volatile( "ld $4, 8(%1)\n\t" \ 3957 "ld $5, 16(%1)\n\t" \ 3958 "ld $25, 0(%1)\n\t" \ 3959 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \ 3961 : "r"( &_argvec[0] ) \ 3962 : "memory", __CALLER_SAVED_REGS ); \ 3963 lval = (__typeof__( lval ))_res; \ 3966 #define CALL_FN_W_WWW( lval, orig, arg1, arg2, arg3 ) \ 3968 volatile OrigFn _orig = ( orig ); \ 3969 volatile unsigned long _argvec[4]; \ 3970 volatile unsigned long _res; \ 3971 _argvec[0] = (unsigned long)_orig.nraddr; \ 3972 _argvec[1] = (unsigned long)( arg1 ); \ 3973 _argvec[2] = (unsigned long)( arg2 ); \ 3974 _argvec[3] = (unsigned long)( arg3 ); \ 3975 __asm__ volatile( "ld $4, 8(%1)\n\t" \ 3976 "ld $5, 
16(%1)\n\t" \ 3977 "ld $6, 24(%1)\n\t" \ 3978 "ld $25, 0(%1)\n\t" \ 3979 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \ 3981 : "r"( &_argvec[0] ) \ 3982 : "memory", __CALLER_SAVED_REGS ); \ 3983 lval = (__typeof__( lval ))_res; \ 3986 #define CALL_FN_W_WWWW( lval, orig, arg1, arg2, arg3, arg4 ) \ 3988 volatile OrigFn _orig = ( orig ); \ 3989 volatile unsigned long _argvec[5]; \ 3990 volatile unsigned long _res; \ 3991 _argvec[0] = (unsigned long)_orig.nraddr; \ 3992 _argvec[1] = (unsigned long)( arg1 ); \ 3993 _argvec[2] = (unsigned long)( arg2 ); \ 3994 _argvec[3] = (unsigned long)( arg3 ); \ 3995 _argvec[4] = (unsigned long)( arg4 ); \ 3996 __asm__ volatile( "ld $4, 8(%1)\n\t" \ 3997 "ld $5, 16(%1)\n\t" \ 3998 "ld $6, 24(%1)\n\t" \ 3999 "ld $7, 32(%1)\n\t" \ 4000 "ld $25, 0(%1)\n\t" \ 4001 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \ 4003 : "r"( &_argvec[0] ) \ 4004 : "memory", __CALLER_SAVED_REGS ); \ 4005 lval = (__typeof__( lval ))_res; \ 4008 #define CALL_FN_W_5W( lval, orig, arg1, arg2, arg3, arg4, arg5 ) \ 4010 volatile OrigFn _orig = ( orig ); \ 4011 volatile unsigned long _argvec[6]; \ 4012 volatile unsigned long _res; \ 4013 _argvec[0] = (unsigned long)_orig.nraddr; \ 4014 _argvec[1] = (unsigned long)( arg1 ); \ 4015 _argvec[2] = (unsigned long)( arg2 ); \ 4016 _argvec[3] = (unsigned long)( arg3 ); \ 4017 _argvec[4] = (unsigned long)( arg4 ); \ 4018 _argvec[5] = (unsigned long)( arg5 ); \ 4019 __asm__ volatile( "ld $4, 8(%1)\n\t" \ 4020 "ld $5, 16(%1)\n\t" \ 4021 "ld $6, 24(%1)\n\t" \ 4022 "ld $7, 32(%1)\n\t" \ 4023 "ld $8, 40(%1)\n\t" \ 4024 "ld $25, 0(%1)\n\t" \ 4025 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \ 4027 : "r"( &_argvec[0] ) \ 4028 : "memory", __CALLER_SAVED_REGS ); \ 4029 lval = (__typeof__( lval ))_res; \ 4032 #define CALL_FN_W_6W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6 ) \ 4034 volatile OrigFn _orig = ( orig ); \ 4035 volatile unsigned long _argvec[7]; \ 4036 volatile unsigned long _res; \ 4037 _argvec[0] = (unsigned long)_orig.nraddr; \ 
4038 _argvec[1] = (unsigned long)( arg1 ); \ 4039 _argvec[2] = (unsigned long)( arg2 ); \ 4040 _argvec[3] = (unsigned long)( arg3 ); \ 4041 _argvec[4] = (unsigned long)( arg4 ); \ 4042 _argvec[5] = (unsigned long)( arg5 ); \ 4043 _argvec[6] = (unsigned long)( arg6 ); \ 4044 __asm__ volatile( "ld $4, 8(%1)\n\t" \ 4045 "ld $5, 16(%1)\n\t" \ 4046 "ld $6, 24(%1)\n\t" \ 4047 "ld $7, 32(%1)\n\t" \ 4048 "ld $8, 40(%1)\n\t" \ 4049 "ld $9, 48(%1)\n\t" \ 4050 "ld $25, 0(%1)\n\t" \ 4051 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \ 4053 : "r"( &_argvec[0] ) \ 4054 : "memory", __CALLER_SAVED_REGS ); \ 4055 lval = (__typeof__( lval ))_res; \ 4058 #define CALL_FN_W_7W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7 ) \ 4060 volatile OrigFn _orig = ( orig ); \ 4061 volatile unsigned long _argvec[8]; \ 4062 volatile unsigned long _res; \ 4063 _argvec[0] = (unsigned long)_orig.nraddr; \ 4064 _argvec[1] = (unsigned long)( arg1 ); \ 4065 _argvec[2] = (unsigned long)( arg2 ); \ 4066 _argvec[3] = (unsigned long)( arg3 ); \ 4067 _argvec[4] = (unsigned long)( arg4 ); \ 4068 _argvec[5] = (unsigned long)( arg5 ); \ 4069 _argvec[6] = (unsigned long)( arg6 ); \ 4070 _argvec[7] = (unsigned long)( arg7 ); \ 4071 __asm__ volatile( "ld $4, 8(%1)\n\t" \ 4072 "ld $5, 16(%1)\n\t" \ 4073 "ld $6, 24(%1)\n\t" \ 4074 "ld $7, 32(%1)\n\t" \ 4075 "ld $8, 40(%1)\n\t" \ 4076 "ld $9, 48(%1)\n\t" \ 4077 "ld $10, 56(%1)\n\t" \ 4078 "ld $25, 0(%1) \n\t" \ 4079 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \ 4081 : "r"( &_argvec[0] ) \ 4082 : "memory", __CALLER_SAVED_REGS ); \ 4083 lval = (__typeof__( lval ))_res; \ 4086 #define CALL_FN_W_8W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 ) \ 4088 volatile OrigFn _orig = ( orig ); \ 4089 volatile unsigned long _argvec[9]; \ 4090 volatile unsigned long _res; \ 4091 _argvec[0] = (unsigned long)_orig.nraddr; \ 4092 _argvec[1] = (unsigned long)( arg1 ); \ 4093 _argvec[2] = (unsigned long)( arg2 ); \ 4094 _argvec[3] = (unsigned long)( arg3 ); \ 4095 
_argvec[4] = (unsigned long)( arg4 ); \ 4096 _argvec[5] = (unsigned long)( arg5 ); \ 4097 _argvec[6] = (unsigned long)( arg6 ); \ 4098 _argvec[7] = (unsigned long)( arg7 ); \ 4099 _argvec[8] = (unsigned long)( arg8 ); \ 4100 __asm__ volatile( "ld $4, 8(%1)\n\t" \ 4101 "ld $5, 16(%1)\n\t" \ 4102 "ld $6, 24(%1)\n\t" \ 4103 "ld $7, 32(%1)\n\t" \ 4104 "ld $8, 40(%1)\n\t" \ 4105 "ld $9, 48(%1)\n\t" \ 4106 "ld $10, 56(%1)\n\t" \ 4107 "ld $11, 64(%1)\n\t" \ 4108 "ld $25, 0(%1) \n\t" \ 4109 VALGRIND_CALL_NOREDIR_T9 "move %0, $2\n" \ 4111 : "r"( &_argvec[0] ) \ 4112 : "memory", __CALLER_SAVED_REGS ); \ 4113 lval = (__typeof__( lval ))_res; \ 4116 #define CALL_FN_W_9W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 ) \ 4118 volatile OrigFn _orig = ( orig ); \ 4119 volatile unsigned long _argvec[10]; \ 4120 volatile unsigned long _res; \ 4121 _argvec[0] = (unsigned long)_orig.nraddr; \ 4122 _argvec[1] = (unsigned long)( arg1 ); \ 4123 _argvec[2] = (unsigned long)( arg2 ); \ 4124 _argvec[3] = (unsigned long)( arg3 ); \ 4125 _argvec[4] = (unsigned long)( arg4 ); \ 4126 _argvec[5] = (unsigned long)( arg5 ); \ 4127 _argvec[6] = (unsigned long)( arg6 ); \ 4128 _argvec[7] = (unsigned long)( arg7 ); \ 4129 _argvec[8] = (unsigned long)( arg8 ); \ 4130 _argvec[9] = (unsigned long)( arg9 ); \ 4131 __asm__ volatile( "dsubu $29, $29, 8\n\t" \ 4132 "ld $4, 72(%1)\n\t" \ 4133 "sd $4, 0($29)\n\t" \ 4134 "ld $4, 8(%1)\n\t" \ 4135 "ld $5, 16(%1)\n\t" \ 4136 "ld $6, 24(%1)\n\t" \ 4137 "ld $7, 32(%1)\n\t" \ 4138 "ld $8, 40(%1)\n\t" \ 4139 "ld $9, 48(%1)\n\t" \ 4140 "ld $10, 56(%1)\n\t" \ 4141 "ld $11, 64(%1)\n\t" \ 4142 "ld $25, 0(%1)\n\t" \ 4143 VALGRIND_CALL_NOREDIR_T9 "daddu $29, $29, 8\n\t" \ 4146 : "r"( &_argvec[0] ) \ 4147 : "memory", __CALLER_SAVED_REGS ); \ 4148 lval = (__typeof__( lval ))_res; \ 4151 #define CALL_FN_W_10W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 ) \ 4153 volatile OrigFn _orig = ( orig ); \ 4154 volatile unsigned long 
_argvec[11]; \ 4155 volatile unsigned long _res; \ 4156 _argvec[0] = (unsigned long)_orig.nraddr; \ 4157 _argvec[1] = (unsigned long)( arg1 ); \ 4158 _argvec[2] = (unsigned long)( arg2 ); \ 4159 _argvec[3] = (unsigned long)( arg3 ); \ 4160 _argvec[4] = (unsigned long)( arg4 ); \ 4161 _argvec[5] = (unsigned long)( arg5 ); \ 4162 _argvec[6] = (unsigned long)( arg6 ); \ 4163 _argvec[7] = (unsigned long)( arg7 ); \ 4164 _argvec[8] = (unsigned long)( arg8 ); \ 4165 _argvec[9] = (unsigned long)( arg9 ); \ 4166 _argvec[10] = (unsigned long)( arg10 ); \ 4167 __asm__ volatile( "dsubu $29, $29, 16\n\t" \ 4168 "ld $4, 72(%1)\n\t" \ 4169 "sd $4, 0($29)\n\t" \ 4170 "ld $4, 80(%1)\n\t" \ 4171 "sd $4, 8($29)\n\t" \ 4172 "ld $4, 8(%1)\n\t" \ 4173 "ld $5, 16(%1)\n\t" \ 4174 "ld $6, 24(%1)\n\t" \ 4175 "ld $7, 32(%1)\n\t" \ 4176 "ld $8, 40(%1)\n\t" \ 4177 "ld $9, 48(%1)\n\t" \ 4178 "ld $10, 56(%1)\n\t" \ 4179 "ld $11, 64(%1)\n\t" \ 4180 "ld $25, 0(%1)\n\t" \ 4181 VALGRIND_CALL_NOREDIR_T9 "daddu $29, $29, 16\n\t" \ 4184 : "r"( &_argvec[0] ) \ 4185 : "memory", __CALLER_SAVED_REGS ); \ 4186 lval = (__typeof__( lval ))_res; \ 4189 #define CALL_FN_W_11W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11 ) \ 4191 volatile OrigFn _orig = ( orig ); \ 4192 volatile unsigned long _argvec[12]; \ 4193 volatile unsigned long _res; \ 4194 _argvec[0] = (unsigned long)_orig.nraddr; \ 4195 _argvec[1] = (unsigned long)( arg1 ); \ 4196 _argvec[2] = (unsigned long)( arg2 ); \ 4197 _argvec[3] = (unsigned long)( arg3 ); \ 4198 _argvec[4] = (unsigned long)( arg4 ); \ 4199 _argvec[5] = (unsigned long)( arg5 ); \ 4200 _argvec[6] = (unsigned long)( arg6 ); \ 4201 _argvec[7] = (unsigned long)( arg7 ); \ 4202 _argvec[8] = (unsigned long)( arg8 ); \ 4203 _argvec[9] = (unsigned long)( arg9 ); \ 4204 _argvec[10] = (unsigned long)( arg10 ); \ 4205 _argvec[11] = (unsigned long)( arg11 ); \ 4206 __asm__ volatile( "dsubu $29, $29, 24\n\t" \ 4207 "ld $4, 72(%1)\n\t" \ 4208 "sd $4, 
0($29)\n\t" \ 4209 "ld $4, 80(%1)\n\t" \ 4210 "sd $4, 8($29)\n\t" \ 4211 "ld $4, 88(%1)\n\t" \ 4212 "sd $4, 16($29)\n\t" \ 4213 "ld $4, 8(%1)\n\t" \ 4214 "ld $5, 16(%1)\n\t" \ 4215 "ld $6, 24(%1)\n\t" \ 4216 "ld $7, 32(%1)\n\t" \ 4217 "ld $8, 40(%1)\n\t" \ 4218 "ld $9, 48(%1)\n\t" \ 4219 "ld $10, 56(%1)\n\t" \ 4220 "ld $11, 64(%1)\n\t" \ 4221 "ld $25, 0(%1)\n\t" \ 4222 VALGRIND_CALL_NOREDIR_T9 "daddu $29, $29, 24\n\t" \ 4225 : "r"( &_argvec[0] ) \ 4226 : "memory", __CALLER_SAVED_REGS ); \ 4227 lval = (__typeof__( lval ))_res; \ 4230 #define CALL_FN_W_12W( lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12 ) \ 4232 volatile OrigFn _orig = ( orig ); \ 4233 volatile unsigned long _argvec[13]; \ 4234 volatile unsigned long _res; \ 4235 _argvec[0] = (unsigned long)_orig.nraddr; \ 4236 _argvec[1] = (unsigned long)( arg1 ); \ 4237 _argvec[2] = (unsigned long)( arg2 ); \ 4238 _argvec[3] = (unsigned long)( arg3 ); \ 4239 _argvec[4] = (unsigned long)( arg4 ); \ 4240 _argvec[5] = (unsigned long)( arg5 ); \ 4241 _argvec[6] = (unsigned long)( arg6 ); \ 4242 _argvec[7] = (unsigned long)( arg7 ); \ 4243 _argvec[8] = (unsigned long)( arg8 ); \ 4244 _argvec[9] = (unsigned long)( arg9 ); \ 4245 _argvec[10] = (unsigned long)( arg10 ); \ 4246 _argvec[11] = (unsigned long)( arg11 ); \ 4247 _argvec[12] = (unsigned long)( arg12 ); \ 4248 __asm__ volatile( "dsubu $29, $29, 32\n\t" \ 4249 "ld $4, 72(%1)\n\t" \ 4250 "sd $4, 0($29)\n\t" \ 4251 "ld $4, 80(%1)\n\t" \ 4252 "sd $4, 8($29)\n\t" \ 4253 "ld $4, 88(%1)\n\t" \ 4254 "sd $4, 16($29)\n\t" \ 4255 "ld $4, 96(%1)\n\t" \ 4256 "sd $4, 24($29)\n\t" \ 4257 "ld $4, 8(%1)\n\t" \ 4258 "ld $5, 16(%1)\n\t" \ 4259 "ld $6, 24(%1)\n\t" \ 4260 "ld $7, 32(%1)\n\t" \ 4261 "ld $8, 40(%1)\n\t" \ 4262 "ld $9, 48(%1)\n\t" \ 4263 "ld $10, 56(%1)\n\t" \ 4264 "ld $11, 64(%1)\n\t" \ 4265 "ld $25, 0(%1)\n\t" \ 4266 VALGRIND_CALL_NOREDIR_T9 "daddu $29, $29, 32\n\t" \ 4269 : "r"( &_argvec[0] ) \ 4270 : "memory", 
__CALLER_SAVED_REGS ); \ 4271 lval = (__typeof__( lval ))_res; \ 4291 #define VG_USERREQ_TOOL_BASE( a, b ) ( (unsigned int)( ( (a)&0xff ) << 24 | ( (b)&0xff ) << 16 ) ) 4292 #define VG_IS_TOOL_USERREQ( a, b, v ) ( VG_USERREQ_TOOL_BASE( a, b ) == ( (v)&0xffff0000 ) ) 4371 #if !defined( __GNUC__ ) 4372 #define __extension__ 4379 #define RUNNING_ON_VALGRIND \ 4380 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 , VG_USERREQ__RUNNING_ON_VALGRIND, 0, 0, 0, 0, 0 ) 4386 #define VALGRIND_DISCARD_TRANSLATIONS( _qzz_addr, _qzz_len ) \ 4387 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__DISCARD_TRANSLATIONS, _qzz_addr, _qzz_len, 0, 0, 0 ) 4394 #if defined( __GNUC__ ) || defined( __INTEL_COMPILER ) && !defined( _MSC_VER ) 4397 static int VALGRIND_PRINTF(
const char*
format, ... )
__attribute__( ( format( __printf__, 1, 2 ), __unused__ ) );
4400 #if defined( _MSC_VER ) 4403 VALGRIND_PRINTF(
const char* format, ... )
4405 #if defined( NVALGRIND ) 4408 #if defined( _MSC_VER ) || defined( __MINGW64__ ) 4411 unsigned long _qzz_res;
4414 va_start( vargs, format );
4415 #if defined( _MSC_VER ) || defined( __MINGW64__ ) 4420 (
unsigned long)&vargs, 0, 0, 0 );
4423 return (
int)_qzz_res;
4427 #if defined( __GNUC__ ) || defined( __INTEL_COMPILER ) && !defined( _MSC_VER ) 4428 static int VALGRIND_PRINTF_BACKTRACE(
const char* format, ... )
4432 #if defined( _MSC_VER ) 4435 VALGRIND_PRINTF_BACKTRACE(
const char* format, ... )
4437 #if defined( NVALGRIND ) 4440 #if defined( _MSC_VER ) || defined( __MINGW64__ ) 4443 unsigned long _qzz_res;
4446 va_start( vargs, format );
4447 #if defined( _MSC_VER ) || defined( __MINGW64__ ) 4449 (uintptr_t)&vargs, 0, 0, 0 );
4452 (
unsigned long)&vargs, 0, 0, 0 );
4455 return (
int)_qzz_res;
4482 #define VALGRIND_NON_SIMD_CALL0( _qyy_fn ) \ 4483 VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 , VG_USERREQ__CLIENT_CALL0, _qyy_fn, 0, 0, 0, 0 ) 4485 #define VALGRIND_NON_SIMD_CALL1( _qyy_fn, _qyy_arg1 ) \ 4486 VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 , VG_USERREQ__CLIENT_CALL1, _qyy_fn, _qyy_arg1, 0, 0, 0 ) 4488 #define VALGRIND_NON_SIMD_CALL2( _qyy_fn, _qyy_arg1, _qyy_arg2 ) \ 4489 VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 , VG_USERREQ__CLIENT_CALL2, _qyy_fn, _qyy_arg1, _qyy_arg2, 0, \ 4492 #define VALGRIND_NON_SIMD_CALL3( _qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3 ) \ 4493 VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 , VG_USERREQ__CLIENT_CALL3, _qyy_fn, _qyy_arg1, _qyy_arg2, \ 4499 #define VALGRIND_COUNT_ERRORS \ 4500 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0 , VG_USERREQ__COUNT_ERRORS, 0, 0, 0, 0, 0 ) 4603 #define VALGRIND_MALLOCLIKE_BLOCK( addr, sizeB, rzB, is_zeroed ) \ 4604 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MALLOCLIKE_BLOCK, addr, sizeB, rzB, is_zeroed, 0 ) 4609 #define VALGRIND_RESIZEINPLACE_BLOCK( addr, oldSizeB, newSizeB, rzB ) \ 4610 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__RESIZEINPLACE_BLOCK, addr, oldSizeB, newSizeB, rzB, 0 ) 4615 #define VALGRIND_FREELIKE_BLOCK( addr, rzB ) \ 4616 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__FREELIKE_BLOCK, addr, rzB, 0, 0, 0 ) 4619 #define VALGRIND_CREATE_MEMPOOL( pool, rzB, is_zeroed ) \ 4620 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__CREATE_MEMPOOL, pool, rzB, is_zeroed, 0, 0 ) 4623 #define VALGRIND_DESTROY_MEMPOOL( pool ) \ 4624 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__DESTROY_MEMPOOL, pool, 0, 0, 0, 0 ) 4627 #define VALGRIND_MEMPOOL_ALLOC( pool, addr, size ) \ 4628 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MEMPOOL_ALLOC, pool, addr, size, 0, 0 ) 4631 #define VALGRIND_MEMPOOL_FREE( pool, addr ) \ 4632 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MEMPOOL_FREE, pool, addr, 0, 0, 0 ) 4635 #define VALGRIND_MEMPOOL_TRIM( pool, addr, size ) \ 4636 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MEMPOOL_TRIM, pool, addr, 
size, 0, 0 ) 4639 #define VALGRIND_MOVE_MEMPOOL( poolA, poolB ) \ 4640 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MOVE_MEMPOOL, poolA, poolB, 0, 0, 0 ) 4643 #define VALGRIND_MEMPOOL_CHANGE( pool, addrA, addrB, size ) \ 4644 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__MEMPOOL_CHANGE, pool, addrA, addrB, size, 0 ) 4647 #define VALGRIND_MEMPOOL_EXISTS( pool ) \ 4648 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__MEMPOOL_EXISTS, pool, 0, 0, 0, 0 ) 4651 #define VALGRIND_STACK_REGISTER( start, end ) \ 4652 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__STACK_REGISTER, start, end, 0, 0, 0 ) 4656 #define VALGRIND_STACK_DEREGISTER( id ) VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__STACK_DEREGISTER, id, 0, 0, 0, 0 ) 4659 #define VALGRIND_STACK_CHANGE( id, start, end ) \ 4660 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__STACK_CHANGE, id, start, end, 0, 0 ) 4663 #define VALGRIND_LOAD_PDB_DEBUGINFO( fd, ptr, total_size, delta ) \ 4664 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__LOAD_PDB_DEBUGINFO, fd, ptr, total_size, delta, 0 ) 4670 #define VALGRIND_MAP_IP_TO_SRCLOC( addr, buf64 ) \ 4671 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__MAP_IP_TO_SRCLOC, addr, buf64, 0, 0, 0 ) 4681 #define VALGRIND_DISABLE_ERROR_REPORTING \ 4682 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__CHANGE_ERR_DISABLEMENT, 1, 0, 0, 0, 0 ) 4686 #define VALGRIND_ENABLE_ERROR_REPORTING \ 4687 VALGRIND_DO_CLIENT_REQUEST_STMT( VG_USERREQ__CHANGE_ERR_DISABLEMENT, -1, 0, 0, 0, 0 ) 4694 #define VALGRIND_MONITOR_COMMAND( command ) \ 4695 VALGRIND_DO_CLIENT_REQUEST_EXPR( 0, VG_USERREQ__GDB_MONITOR_COMMAND, command, 0, 0, 0, 0 ) 4697 #undef PLAT_x86_darwin 4698 #undef PLAT_amd64_darwin 4699 #undef PLAT_x86_win32 4700 #undef PLAT_amd64_win64 4701 #undef PLAT_x86_linux 4702 #undef PLAT_amd64_linux 4703 #undef PLAT_ppc32_linux 4704 #undef PLAT_ppc64_linux 4705 #undef PLAT_arm_linux 4706 #undef PLAT_s390x_linux 4707 #undef PLAT_mips32_linux 4708 #undef PLAT_mips64_linux
GAUDI_API std::string format(const char *,...)
MsgStream format utility, à la sprintf(...).
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(_zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)