/* Baloo: bundled copy of Valgrind's valgrind.h (client-request header). */
1/* -*- c -*-
2 ----------------------------------------------------------------
3
4 This file is part of Valgrind, a dynamic binary instrumentation
5 framework.
6
7 SPDX-FileCopyrightText: 2000-2017 Julian Seward. All rights reserved.
8
9 SPDX-License-Identifier: bzip2-1.0.6
10
11 ----------------------------------------------------------------
12*/
13
14
15/* This file is for inclusion into client (your!) code.
16
17 You can use these macros to manipulate and query Valgrind's
18 execution inside your own programs.
19
20 The resulting executables will still run without Valgrind, just a
21 little bit more slowly than they otherwise would, but otherwise
22 unchanged. When not running on valgrind, each client request
23 consumes very few (eg. 7) instructions, so the resulting performance
24 loss is negligible unless you plan to execute client requests
25 millions of times per second. Nevertheless, if that is still a
26 problem, you can compile with the NVALGRIND symbol defined (gcc
27 -DNVALGRIND) so that client requests are not even compiled in. */
28
#ifndef __VALGRIND_H
#define __VALGRIND_H


/* ------------------------------------------------------------------ */
/* VERSION NUMBER OF VALGRIND                                         */
/* ------------------------------------------------------------------ */

/* Specify Valgrind's version number, so that user code can
   conditionally compile based on our version number.  Note that these
   were introduced at version 3.6 and so do not exist in version 3.5
   or earlier.  The recommended way to use them to check for "version
   X.Y or later" is (eg)

#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__)   \
    && (__VALGRIND_MAJOR__ > 3                                   \
        || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    14

/* stdarg.h: presumably needed by variadic helpers later in this
   header (not visible in this chunk) -- TODO confirm before removing. */
#include <stdarg.h>
52
/* Nb: this file might be included in a file compiled with -ansi.  So
   we can't use C++ style "//" comments nor the "asm" keyword (instead
   use "__asm__"). */

/* Derive some tags indicating what the target platform is.  Note
   that in this file we're using the compiler's CPP symbols for
   identifying architectures, which are different to the ones we use
   within the rest of Valgrind.  Note, __powerpc__ is active for both
   32 and 64-bit PPC, whereas __powerpc64__ is only active for the
   latter (on Linux, that is).

   Misc note: how to find out what's predefined in gcc by default:
   gcc -Wp,-dM somefile.c
*/
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris


/* At most one PLAT_* tag gets defined below; if no case matches, the
   final #else forces NVALGRIND so no inline asm is emitted at all. */
#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
      || defined(__CYGWIN32__) \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif defined(__MINGW64__) \
      || (defined(_WIN64) && defined(_M_X64))
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
/* Big Endian uses ELF version 1 */
#  define PLAT_ppc64be_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
/* Little Endian uses ELF version 2 */
#  define PLAT_ppc64le_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips!=64)
#  define PLAT_mips32_linux 1
#elif defined(__sun) && defined(__i386__)
#  define PLAT_x86_solaris 1
#elif defined(__sun) && defined(__x86_64__)
#  define PLAT_amd64_solaris 1
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms.  */
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif
129
130
131/* ------------------------------------------------------------------ */
132/* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
133/* in here of use to end-users -- skip to the next section. */
134/* ------------------------------------------------------------------ */
135
/*
 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
 * request.  Accepts both pointers and integers as arguments.
 *
 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
 * client request that does not return a value.
 *
 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
 * client request and whose value equals the client request result.  Accepts
 * both pointers and integers as arguments.  Note that such calls are not
 * necessarily pure functions -- they may have side effects.
 */

/* Statement form: assigns the request's result to _zzq_rlval.  Just a
   wrapper around the _EXPR form. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* Result-discarding form: issues the request (default value 0) and
   ignores whatever it returns. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                           _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
161
#if defined(NVALGRIND)

/* Define NVALGRIND to completely remove the Valgrind magic sequence
   from the compiled code (analogous to NDEBUG's effects on
   assert()).  Note that in this configuration the request code and
   argument expressions are not evaluated at all -- the whole thing
   collapses to just (_zzq_default). */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)

#else  /* ! NVALGRIND */
173
174/* The following defines the magic code sequences which the JITter
175 spots and handles magically. Don't look too closely at them as
176 they will rot your brain.
177
178 The assembly code sequences for all architectures is in this one
179 file. This is because this file must be stand-alone, and we don't
180 want to have multiple files.
181
182 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
183 value gets put in the return slot, so that everything works when
184 this is executed not under Valgrind. Args are passed in a memory
185 block, and so there's no intrinsic limit to the number that could
186 be passed, but it's currently five.
187
188 The macro args are:
189 _zzq_rlval result lvalue
190 _zzq_default default value (result returned when running on real CPU)
191 _zzq_request request code
192 _zzq_arg1..5 request params
193
194 The other two macros are used to support function wrapping, and are
195 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
196 guest's NRADDR pseudo-register and whatever other information is
197 needed to safely run the call original from the wrapper: on
198 ppc64-linux, the R2 value at the divert point is also needed. This
199 information is abstracted into a user-visible type, OrigFn.
200
201 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
202 guest, but guarantees that the branch instruction will not be
203 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
204 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
205 complete inline asm, since it needs to be combined with more magic
206 inline asm stuff to be useful.
207*/
208
/* ----------------- x86-{linux,darwin,solaris} ---------------- */

#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  (defined(PLAT_x86_win32) && defined(__GNUC__))     \
    ||  defined(PLAT_x86_solaris)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotations of %edi totalling 3+13+29+19 = 64 bits, i.e. two full
   turns of a 32-bit register: a no-op on a real CPU, but recognised by
   Valgrind's JIT as the magic preamble. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3,  %%edi ; roll $13, %%edi\n\t"      \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm text fragment only -- must be combined with further inline asm
   (by the CALL_FN_ machinery); not a complete statement by itself. */
#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgl %%edi,%%edi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
          || PLAT_x86_solaris */
275
/* ------------------------- x86-Win32 ------------------------- */

#if defined(PLAT_x86_win32) && !defined(__GNUC__)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* MSVC inline-asm form of the magic preamble: four rotations of EDI
   totalling 64 bits (a no-op on a real CPU). */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     __asm rol edi, 3  __asm rol edi, 13          \
                     __asm rol edi, 29 __asm rol edi, 19

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                                \
        _zzq_default, _zzq_request,                                     \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)          \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),          \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),              \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),                 \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

/* Helper that actually issues the client request under MSVC: args go
   in EAX, default in EDX, then the magic "xchg ebx,ebx" marker; the
   result comes back in EDX.  NOTE(review): the body of this function
   was truncated in this copy of the file; restored from the upstream
   valgrind.h 3.14 text, which the surviving lines match exactly. */
static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1,    uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3,    uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                    \
   volatile unsigned int __addr;                                  \
   __asm { __SPECIAL_INSTRUCTION_PREAMBLE                         \
           /* %EAX = guest_NRADDR */                              \
           __asm xchg ecx,ecx                                     \
           __asm mov __addr, eax                                  \
   }                                                              \
   _zzq_orig->nraddr = __addr;                                    \
 }

/* No-redirect calls are not implemented for the MSVC inline
   assembler; expands to a deliberate compile error if used. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE                        \
            __asm xchg edi,edi                                    \
    }                                                             \
 } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */
348
/* ----------------- amd64-{linux,darwin,solaris} --------------- */

#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin) \
    ||  defined(PLAT_amd64_solaris) \
    ||  (defined(PLAT_amd64_win64) && defined(__GNUC__))

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotations of %rdi totalling 3+13+61+51 = 128 bits, i.e. two
   full turns of a 64-bit register: a no-op on a real CPU, but
   recognised by Valgrind's JIT as the magic preamble. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3,  %%rdi ; rolq $13, %%rdi\n\t"      \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long int _zzq_args[6];                   \
       volatile unsigned long int _zzq_result;                    \
       _zzq_args[0] = (unsigned long int)(_zzq_request);          \
       _zzq_args[1] = (unsigned long int)(_zzq_arg1);             \
       _zzq_args[2] = (unsigned long int)(_zzq_arg2);             \
       _zzq_args[3] = (unsigned long int)(_zzq_arg3);             \
       _zzq_args[4] = (unsigned long int)(_zzq_arg4);             \
       _zzq_args[5] = (unsigned long int)(_zzq_arg5);             \
       __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE            \
                        /* %RDX = client_request ( %RAX ) */      \
                        "xchgq %%rbx,%%rbx"                       \
                        : "=d" (_zzq_result)                      \
                        : "a" (&_zzq_args[0]), "0" (_zzq_default) \
                        : "cc", "memory"                          \
                       );                                         \
       _zzq_result;                                               \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned long int __addr;                          \
      __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE             \
                       /* %RAX = guest_NRADDR */                  \
                       "xchgq %%rcx,%%rcx"                        \
                       : "=a" (__addr)                            \
                       :                                          \
                       : "cc", "memory"                           \
                      );                                          \
      _zzq_orig->nraddr = __addr;                                 \
    }

/* Asm text fragment only -- must be combined with further inline asm
   (by the CALL_FN_ machinery); not a complete statement by itself. */
#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgq %%rdi,%%rdi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris
          || (PLAT_amd64_win64 && __GNUC__) */

/* ------------------------- amd64-Win64 ------------------------- */

#if defined(PLAT_amd64_win64) && !defined(__GNUC__)

#error Unsupported compiler.

#endif /* PLAT_amd64_win64 */
422
/* ------------------------ ppc32-linux ------------------------ */

#if defined(PLAT_ppc32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotations of r0 totalling 3+13+29+19 = 64 bits (twice round a
   32-bit register): a no-op on real hardware, but recognised by
   Valgrind's JIT as the magic preamble. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                    "rlwinm 0,0,3,0,31  ; rlwinm 0,0,13,0,31\n\t" \
                    "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned int  _zzq_args[6];                          \
             unsigned int  _zzq_result;                           \
             unsigned int* _zzq_ptr;                              \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm text fragment only -- combined with further inline asm by the
   CALL_FN_ machinery. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc32_linux */
491
/* ------------------------ ppc64-linux ------------------------ */

#if defined(PLAT_ppc64be_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;  /* what tocptr do we need? */
   }
   OrigFn;

/* Four rotations of r0 totalling 3+13+61+51 = 128 bits (twice round a
   64-bit register): a no-op on real hardware, but recognised by
   Valgrind's JIT as the magic preamble. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3  ; rotldi 0,0,13\n\t"          \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned long int  _zzq_args[6];                     \
             unsigned long int  _zzq_result;                      \
             unsigned long int* _zzq_ptr;                         \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

/* Collects both the guest NRADDR and the R2 (TOC pointer) value at
   the divert point; both are needed to call the original safely. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64be_linux */
570
#if defined(PLAT_ppc64le_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

/* Same 128-bit no-op rotation preamble as ppc64be; the ELFv2 (LE) ABI
   differs below only in using R12 as the no-redirect branch target. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3  ; rotldi 0,0,13\n\t"          \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned long int  _zzq_args[6];                     \
             unsigned long int  _zzq_result;                      \
             unsigned long int* _zzq_ptr;                         \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

/* Collects both the guest NRADDR and the R2 (TOC pointer) value at
   the divert point; both are needed to call the original safely. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R12 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64le_linux */
647
/* ------------------------- arm-linux ------------------------- */

#if defined(PLAT_arm_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotations of r12 totalling 3+13+29+19 = 64 bits (twice round a
   32-bit register): a no-op on real hardware, but recognised by
   Valgrind's JIT as the magic preamble. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
      "mov r12, r12, ror #3  ; mov r12, r12, ror #13 \n\t"        \
      "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int  _zzq_args[6];                          \
    volatile unsigned int  _zzq_result;                           \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                    \
   unsigned int __addr;                                           \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                    /* R3 = guest_NRADDR */                       \
                    "orr r11, r11, r11\n\t"                       \
                    "mov %0, r3"                                  \
                    : "=r" (__addr)                               \
                    :                                             \
                    : "cc", "memory", "r3"                        \
                   );                                             \
   _zzq_orig->nraddr = __addr;                                    \
 }

/* Asm text fragment only -- combined with further inline asm by the
   CALL_FN_ machinery. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    /* branch-and-link-to-noredir *%R4 */         \
                    "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr r9, r9, r9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm_linux */
715
/* ------------------------ arm64-linux ------------------------- */

#if defined(PLAT_arm64_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotations of x12 totalling 3+13+51+61 = 128 bits (twice round
   a 64-bit register): a no-op on real hardware, but recognised by
   Valgrind's JIT as the magic preamble. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
      "ror x12, x12, #3  ;  ror x12, x12, #13 \n\t"               \
      "ror x12, x12, #51 ;  ror x12, x12, #61 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned long int  _zzq_args[6];                     \
    volatile unsigned long int  _zzq_result;                      \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile("mov x3, %1\n\t" /*default*/                 \
                     "mov x4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = client_request ( X4 ) */             \
                     "orr x10, x10, x10\n\t"                      \
                     "mov %0, x3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" ((unsigned long int)(_zzq_default)),   \
                       "r" (&_zzq_args[0])                        \
                     : "cc","memory", "x3", "x4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                    \
   unsigned long int __addr;                                      \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                    /* X3 = guest_NRADDR */                       \
                    "orr x11, x11, x11\n\t"                       \
                    "mov %0, x3"                                  \
                    : "=r" (__addr)                               \
                    :                                             \
                    : "cc", "memory", "x3"                        \
                   );                                             \
   _zzq_orig->nraddr = __addr;                                    \
 }

/* Asm text fragment only -- combined with further inline asm by the
   CALL_FN_ machinery. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    /* branch-and-link-to-noredir X8 */           \
                    "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr x9, x9, x9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm64_linux */
784
/* ------------------------ s390x-linux ------------------------ */

#if defined(PLAT_s390x_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code.  This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 *
 * Each "lr x,x" loads a register onto itself -- a no-op on real
 * hardware, but the exact sequence is recognised by the decoder.
 */
#define __SPECIAL_INSTRUCTION_PREAMBLE                           \
                     "lr 15,15\n\t"                              \
                     "lr 1,1\n\t"                                \
                     "lr 2,2\n\t"                                \
                     "lr 3,3\n\t"

/* The no-op following the preamble selects which operation is meant. */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                         \
       _zzq_default, _zzq_request,                               \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                  \
 ({volatile unsigned long int _zzq_args[6];                      \
   volatile unsigned long int _zzq_result;                       \
   _zzq_args[0] = (unsigned long int)(_zzq_request);             \
   _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
   _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
   _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
   _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
   _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
   __asm__ volatile(/* r2 = args */                              \
                    "lgr 2,%1\n\t"                               \
                    /* r3 = default */                           \
                    "lgr 3,%2\n\t"                               \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CLIENT_REQUEST_CODE                        \
                    /* results = r3 */                           \
                    "lgr %0, 3\n\t"                              \
                    : "=d" (_zzq_result)                         \
                    : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                    : "cc", "2", "3", "memory"                   \
                   );                                            \
   _zzq_result;                                                  \
 })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                      \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
   volatile unsigned long int __addr;                            \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                    __GET_NR_CONTEXT_CODE                        \
                    "lgr %0, 3\n\t"                              \
                    : "=a" (__addr)                              \
                    :                                            \
                    : "cc", "3", "memory"                        \
                   );                                            \
   _zzq_orig->nraddr = __addr;                                   \
 }

/* Asm text fragment only -- combined with further inline asm by the
   CALL_FN_ machinery. */
#define VALGRIND_CALL_NOREDIR_R1                                 \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     __VEX_INJECT_IR_CODE);                      \
 } while (0)

#endif /* PLAT_s390x_linux */
861
/* ------------------------- mips32-linux ---------------- */

#if defined(PLAT_mips32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* .word 0x342
 * .word 0x742
 * .word 0xC2
 * .word 0x4C2*/
/* $0 (zero) is hardwired to 0 and writes to it are discarded, so these
   shifts are architectural no-ops; Valgrind spots the sequence. */
#define __SPECIAL_INSTRUCTION_PREAMBLE          \
                     "srl $0, $0, 13\n\t"       \
                     "srl $0, $0, 29\n\t"       \
                     "srl $0, $0, 3\n\t"        \
                     "srl $0, $0, 19\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
     volatile unsigned int _zzq_result;                           \
     _zzq_args[0] = (unsigned int)(_zzq_request);                 \
     _zzq_args[1] = (unsigned int)(_zzq_arg1);                    \
     _zzq_args[2] = (unsigned int)(_zzq_arg2);                    \
     _zzq_args[3] = (unsigned int)(_zzq_arg3);                    \
     _zzq_args[4] = (unsigned int)(_zzq_arg4);                    \
     _zzq_args[5] = (unsigned int)(_zzq_arg5);                    \
     __asm__ volatile("move $11, %1\n\t" /*default*/              \
                      "move $12, %2\n\t" /*ptr*/                  \
                      __SPECIAL_INSTRUCTION_PREAMBLE              \
                      /* T3 = client_request ( T4 ) */            \
                      "or $13, $13, $13\n\t"                      \
                      "move %0, $11\n\t" /*result*/               \
                      : "=r" (_zzq_result)                        \
                      : "r" (_zzq_default), "r" (&_zzq_args[0])   \
                      : "$11", "$12", "memory");                  \
     _zzq_result;                                                 \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                    \
   volatile unsigned int __addr;                                  \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                    /* %t9 = guest_NRADDR */                      \
                    "or $14, $14, $14\n\t"                        \
                    "move %0, $11"     /*result*/                 \
                    : "=r" (__addr)                               \
                    :                                             \
                    : "$11"                                       \
                   );                                             \
   _zzq_orig->nraddr = __addr;                                    \
 }

/* Asm text fragment only -- combined with further inline asm by the
   CALL_FN_ machinery. */
#define VALGRIND_CALL_NOREDIR_T9                                 \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    /* call-noredir *%t9 */                      \
                    "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "or $11, $11, $11\n\t"                      \
                    );                                           \
 } while (0)


#endif /* PLAT_mips32_linux */
934
/* ------------------------- mips64-linux ---------------- */

#if defined(PLAT_mips64_linux)

typedef
   struct {
      unsigned long nraddr; /* where's the code? */
   }
   OrigFn;

/* dsll $0,$0, 3
 * dsll $0,$0, 13
 * dsll $0,$0, 29
 * dsll $0,$0, 19*/
/* $0 (zero) is hardwired to 0 and writes to it are discarded, so these
   shifts are architectural no-ops; Valgrind spots the sequence. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                              \
                     "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"            \
                     "dsll $0,$0,29 ; dsll $0,$0,19\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                            \
       _zzq_default, _zzq_request,                                  \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)       \
  __extension__                                                     \
  ({ volatile unsigned long int _zzq_args[6];                       \
     volatile unsigned long int _zzq_result;                        \
     _zzq_args[0] = (unsigned long int)(_zzq_request);              \
     _zzq_args[1] = (unsigned long int)(_zzq_arg1);                 \
     _zzq_args[2] = (unsigned long int)(_zzq_arg2);                 \
     _zzq_args[3] = (unsigned long int)(_zzq_arg3);                 \
     _zzq_args[4] = (unsigned long int)(_zzq_arg4);                 \
     _zzq_args[5] = (unsigned long int)(_zzq_arg5);                 \
     __asm__ volatile("move $11, %1\n\t" /*default*/                \
                      "move $12, %2\n\t" /*ptr*/                    \
                      __SPECIAL_INSTRUCTION_PREAMBLE                \
                      /* $11 = client_request ( $12 ) */            \
                      "or $13, $13, $13\n\t"                        \
                      "move %0, $11\n\t" /*result*/                 \
                      : "=r" (_zzq_result)                          \
                      : "r" (_zzq_default), "r" (&_zzq_args[0])     \
                      : "$11", "$12", "memory");                    \
     _zzq_result;                                                   \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                         \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                      \
   volatile unsigned long int __addr;                               \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                  \
                    /* $11 = guest_NRADDR */                        \
                    "or $14, $14, $14\n\t"                          \
                    "move %0, $11"     /*result*/                   \
                    : "=r" (__addr)                                 \
                    :                                               \
                    : "$11");                                       \
   _zzq_orig->nraddr = __addr;                                      \
 }

/* Asm text fragment only -- combined with further inline asm by the
   CALL_FN_ machinery. */
#define VALGRIND_CALL_NOREDIR_T9                                    \
                    __SPECIAL_INSTRUCTION_PREAMBLE                  \
                    /* call-noredir $25 */                          \
                    "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                    \
 do {                                                               \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                     "or $11, $11, $11\n\t"                         \
                    );                                              \
 } while (0)

#endif /* PLAT_mips64_linux */

/* Insert assembly code for other platforms here... */

#endif /* NVALGRIND */
1007
1008
1009/* ------------------------------------------------------------------ */
1010/* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1011/* ugly. It's the least-worst tradeoff I can think of. */
1012/* ------------------------------------------------------------------ */
1013
1014/* This section defines magic (a.k.a appalling-hack) macros for doing
1015 guaranteed-no-redirection macros, so as to get from function
1016 wrappers to the functions they are wrapping. The whole point is to
1017 construct standard call sequences, but to do the call itself with a
1018 special no-redirect call pseudo-instruction that the JIT
1019 understands and handles specially. This section is long and
1020 repetitious, and I can't see a way to make it shorter.
1021
1022 The naming scheme is as follows:
1023
1024 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1025
1026 'W' stands for "word" and 'v' for "void". Hence there are
1027 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1028 and for each, the possibility of returning a word-typed result, or
1029 no result.
1030*/
1031
1032/* Use these to write the name of your wrapper. NOTE: duplicates
1033 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
1034 the default behavior equivalence class tag "0000" into the name.
1035 See pub_tool_redir.h for details -- normally you don't need to
1036 think about this, though. */
1037
1038/* Use an extra level of macroisation so as to ensure the soname/fnname
1039 args are fully macro-expanded before pasting them together. */
1040#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
1041
/* Build the wrapper symbol _vgw00000ZU_<soname>_<fnname>; "ZU" means
   the soname part is not Z-encoded (see pub_tool_redir.h), and
   "00000" carries the default behaviour-equivalence tag. */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)
1044
/* As I_WRAP_SONAME_FNNAME_ZU, but "ZZ": both soname and fnname are
   Z-encoded (see pub_tool_redir.h). */
#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1047
/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros.  The type of the argument _lval is OrigFn.  (This is a
   plain alias for VALGRIND_GET_NR_CONTEXT.) */
#define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
1053
1054/* Also provide end-user facilities for function replacement, rather
1055 than wrapping. A replacement function differs from a wrapper in
1056 that it has no way to get hold of the original function being
1057 called, and hence no way to call onwards to it. In a replacement
1058 function, VALGRIND_GET_ORIG_FN always returns zero. */
1059
/* Build the replacement symbol _vgr00000ZU_<soname>_<fnname>
   ("_vgr" = replace, vs "_vgw" = wrap above); soname not Z-encoded. */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)
1062
/* As I_REPLACE_SONAME_FNNAME_ZU, but with both name parts Z-encoded. */
#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1065
1066/* Derivatives of the main macros below, for calling functions
1067 returning void. */
1068
/* Void-returning conveniences: each one forwards to the matching
   word-returning CALL_FN_W_* macro and throws the result away in a
   macro-local volatile temporary. */

#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_v(_discard,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_W(_discard,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_WW(_discard,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_WWW(_discard,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_WWWW(_discard,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_5W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_6W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_7W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1100
1101/* ----------------- x86-{linux,darwin,solaris} ---------------- */
1102
1103#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
1104 || defined(PLAT_x86_solaris)
1105
/* These regs are trashed by the hidden call.  No need to mention eax
   as gcc can already see that, plus causes gcc to bomb.  (%edi is
   listed separately by each CALL_FN_* below, since
   VALGRIND_ALIGN_STACK uses it to save %esp.) */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
1109
1110/* Macros to save and align the stack before making a function
1111 call and restore it afterwards as gcc may not keep the stack
1112 pointer aligned if it doesn't realise calls are being made
1113 to other functions. */
1114
/* Save %esp in %edi, then round %esp down to a 16-byte boundary;
   RESTORE undoes it.  %edi is therefore in the clobber list of every
   CALL_FN_* macro below. */
#define VALGRIND_ALIGN_STACK                                      \
      "movl %%esp,%%edi\n\t"                                      \
      "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "movl %%edi,%%esp\n\t"
1120
1121/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1122 long) == 4. */
1123
/* Call *orig with no arguments; result into lval.  _argvec[0] holds
   the target address; %eax carries &_argvec in and the result out. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1141
/* 1 stack arg: subl $12 + one 4-byte push keeps %esp 16-aligned
   (VALGRIND_ALIGN_STACK aligned it first). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1162
/* 2 stack args: subl $8 + 8 bytes pushed = 16, alignment preserved.
   Args are pushed right-to-left per the cdecl convention. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1185
/* 3 stack args: subl $4 + 12 bytes pushed = 16, alignment preserved. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1210
/* 4 stack args: 16 bytes pushed is already a multiple of 16, so no
   padding subl is needed. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1236
/* 5 stack args: subl $12 + 20 bytes pushed = 32, alignment preserved. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1265
/* 6 stack args: subl $8 + 24 bytes pushed = 32, alignment preserved. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1296
/* 7 stack args: subl $4 + 28 bytes pushed = 32, alignment preserved. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1330
/* 8 stack args: 32 bytes pushed is a multiple of 16, no padding. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1365
/* 9 stack args: subl $12 + 36 bytes pushed = 48, alignment preserved. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1403
/* 10 stack args: subl $8 + 40 bytes pushed = 48, alignment preserved. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1443
/* 11 stack args: subl $4 + 44 bytes pushed = 48, alignment preserved. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1486
/* 12 stack args: 48 bytes pushed is a multiple of 16, no padding. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1530
1531#endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris */
1532
1533/* ---------------- amd64-{linux,darwin,solaris} --------------- */
1534
1535#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1536 || defined(PLAT_amd64_solaris)
1537
1538/* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1539
/* These regs are trashed by the hidden call.  (%rax is omitted: it is
   already an explicit operand of every CALL_FN_ asm block below.
   %r14/%r15 are listed separately, being used by VALGRIND_ALIGN_STACK
   and VALGRIND_CFI_PROLOGUE respectively.) */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"
1543
1544/* This is all pretty complex. It's so as to make stack unwinding
1545 work reliably. See bug 243270. The basic problem is the sub and
1546 add of 128 of %rsp in all of the following macros. If gcc believes
1547 the CFA is in %rsp, then unwinding may fail, because what's at the
1548 CFA is not what gcc "expected" when it constructs the CFIs for the
1549 places where the macros are instantiated.
1550
1551 But we can't just add a CFI annotation to increase the CFA offset
1552 by 128, to match the sub of 128 from %rsp, because we don't know
1553 whether gcc has chosen %rsp as the CFA at that point, or whether it
1554 has chosen some other register (eg, %rbp). In the latter case,
1555 adding a CFI annotation to change the CFA offset is simply wrong.
1556
1557 So the solution is to get hold of the CFA using
1558 __builtin_dwarf_cfa(), put it in a known register, and add a
1559 CFI annotation to say what the register is. We choose %rbp for
1560 this (perhaps perversely), because:
1561
1562 (1) %rbp is already subject to unwinding. If a new register was
1563 chosen then the unwinder would have to unwind it in all stack
1564 traces, which is expensive, and
1565
1566 (2) %rbp is already subject to precise exception updates in the
1567 JIT. If a new register was chosen, we'd have to have precise
1568 exceptions for it too, which reduces performance of the
1569 generated code.
1570
1571 However .. one extra complication. We can't just whack the result
1572 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1573 list of trashed registers at the end of the inline assembly
1574 fragments; gcc won't allow %rbp to appear in that list. Hence
1575 instead we need to stash %rbp in %r15 for the duration of the asm,
1576 and say that %r15 is trashed instead. gcc seems happy to go with
1577 that.
1578
1579 Oh .. and this all needs to be conditionalised so that it is
1580 unchanged from before this commit, when compiled with older gccs
1581 that don't support __builtin_dwarf_cfa. Furthermore, since
1582 this header file is freestanding, it has to be independent of
1583 config.h, and so the following conditionalisation cannot depend on
1584 configure time checks.
1585
1586 Although it's not clear from
1587 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1588 this expression excludes Darwin.
1589 .cfi directives in Darwin assembly appear to be completely
1590 different and I haven't investigated how they work.
1591
1592 For even more entertainment value, note we have to use the
1593 completely undocumented __builtin_dwarf_cfa(), which appears to
1594 really compute the CFA, whereas __builtin_frame_address(0) claims
1595 to but actually doesn't. See
1596 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1597*/
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* Extra asm input operand carrying gcc's notion of the CFA;
   referenced as %2 inside the CALL_FN_ blocks below. */
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
/* Stash %rbp in %r15 (gcc refuses %rbp in clobber lists — see the
   long comment above), load the CFA into %rbp, and tell the unwinder
   the CFA now lives there. */
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
/* Undo the prologue: restore %rbp and the remembered CFI state. */
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
/* No DWARF2 CFI asm support (older gcc, or Darwin): all three
   expand to nothing. */
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
1614
1615/* Macros to save and align the stack before making a function
1616 call and restore it afterwards as gcc may not keep the stack
1617 pointer aligned if it doesn't realise calls are being made
1618 to other functions. */
1619
/* Save %rsp in %r14, then round %rsp down to a 16-byte boundary;
   RESTORE undoes it.  %r14 is therefore in the clobber list of every
   CALL_FN_* macro below. */
#define VALGRIND_ALIGN_STACK                                      \
      "movq %%rsp,%%r14\n\t"                                      \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "movq %%r14,%%rsp\n\t"
1625
1626/* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1627 long) == 8. */
1628
1629/* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1630 macros. In order not to trash the stack redzone, we need to drop
1631 %rsp by 128 before the hidden call, and restore afterwards. The
1632 nastiness is that it is only by luck that the stack still appears
1633 to be unwindable during the hidden call - since then the behaviour
1634 of any routine using this macro does not match what the CFI data
1635 says. Sigh.
1636
1637 Why is this important? Imagine that a wrapper has a stack
1638 allocated local, and passes to the hidden call, a pointer to it.
1639 Because gcc does not know about the hidden call, it may allocate
1640 that local in the redzone. Unfortunately the hidden call may then
1641 trash it before it comes to use it. So we must step clear of the
1642 redzone, for the duration of the hidden call, to make it safe.
1643
1644 Probably the same problem afflicts the other redzone-style ABIs too
1645 (ppc64-linux); but for those, the stack is
1646 self describing (none of this CFI nonsense) so at least messing
1647 with the stack pointer doesn't give a danger of non-unwindable
1648 stack. */
1649
/* Call *orig with no args; result into lval.  subq $128 steps over
   the ABI red zone (see the long comment above); %rax carries
   &_argvec in and the result out. */
#define CALL_FN_W_v(lval, orig)                                        \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[1];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
1670
/* 1 arg, passed in %rdi per the SysV AMD64 ABI. */
#define CALL_FN_W_W(lval, orig, arg1)                                  \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[2];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
1693
/* 2 args in %rdi, %rsi (loaded last-to-first so %rax stays valid). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                            \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[3];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
1718
/* 3 args in %rdi, %rsi, %rdx. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                      \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[4];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
1745
/* 4 args in %rdi, %rsi, %rdx, %rcx. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)                \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[5];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      _argvec[4] = (unsigned long)(arg4);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 32(%%rax), %%rcx\n\t"                                   \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
1774
/* 5 args in %rdi, %rsi, %rdx, %rcx, %r8. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)             \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[6];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      _argvec[4] = (unsigned long)(arg4);                              \
      _argvec[5] = (unsigned long)(arg5);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 40(%%rax), %%r8\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                                   \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
1805
/* 6 args: all six ABI argument registers, none on the stack. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[7];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      _argvec[4] = (unsigned long)(arg4);                              \
      _argvec[5] = (unsigned long)(arg5);                              \
      _argvec[6] = (unsigned long)(arg6);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 48(%%rax), %%r9\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                                   \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
1838
/* 7 args: six in registers, the 7th pushed.  subq $136 (not $128)
   so that 136 + one 8-byte push = 144, keeping %rsp 16-aligned while
   still clearing the 128-byte red zone. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,        \
                     arg7)                                             \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[8];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      _argvec[4] = (unsigned long)(arg4);                              \
      _argvec[5] = (unsigned long)(arg5);                              \
      _argvec[6] = (unsigned long)(arg6);                              \
      _argvec[7] = (unsigned long)(arg7);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $136,%%rsp\n\t"                                         \
         "pushq 56(%%rax)\n\t"                                         \
         "movq 48(%%rax), %%r9\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                                   \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
1874
/* 8 args: six in registers, two pushed.  128 + 16 pushed = 144,
   so %rsp stays 16-aligned. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,        \
                     arg7,arg8)                                        \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[9];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      _argvec[4] = (unsigned long)(arg4);                              \
      _argvec[5] = (unsigned long)(arg5);                              \
      _argvec[6] = (unsigned long)(arg6);                              \
      _argvec[7] = (unsigned long)(arg7);                              \
      _argvec[8] = (unsigned long)(arg8);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "pushq 64(%%rax)\n\t"                                         \
         "pushq 56(%%rax)\n\t"                                         \
         "movq 48(%%rax), %%r9\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                                   \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
1912
/* Call a 9-arg function.  Args 1-6 in registers; args 7-9 pushed
   right-to-left.  subq $136 plus three pushes = 160 bytes, keeping
   16-byte stack alignment. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 72(%%rax)\n\t"      /* arg9 */                    \
         "pushq 64(%%rax)\n\t"      /* arg8 */                    \
         "pushq 56(%%rax)\n\t"      /* arg7 */                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14",  \
                     "r15"                                        \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1952
/* Call a 10-arg function.  Args 1-6 in registers; args 7-10 pushed
   right-to-left.  subq $128 plus four pushes = 160 bytes, keeping
   16-byte stack alignment. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 80(%%rax)\n\t"      /* arg10 */                   \
         "pushq 72(%%rax)\n\t"      /* arg9 */                    \
         "pushq 64(%%rax)\n\t"      /* arg8 */                    \
         "pushq 56(%%rax)\n\t"      /* arg7 */                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14",  \
                     "r15"                                        \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1994
/* Call an 11-arg function.  Args 1-6 in registers; args 7-11 pushed
   right-to-left.  subq $136 plus five pushes = 176 bytes, keeping
   16-byte stack alignment. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 88(%%rax)\n\t"      /* arg11 */                   \
         "pushq 80(%%rax)\n\t"      /* arg10 */                   \
         "pushq 72(%%rax)\n\t"      /* arg9 */                    \
         "pushq 64(%%rax)\n\t"      /* arg8 */                    \
         "pushq 56(%%rax)\n\t"      /* arg7 */                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14",  \
                     "r15"                                        \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2038
/* Call a 12-arg function.  Args 1-6 in registers; args 7-12 pushed
   right-to-left.  subq $128 plus six pushes = 176 bytes, keeping
   16-byte stack alignment. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,     \
                                  arg12)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 96(%%rax)\n\t"      /* arg12 */                   \
         "pushq 88(%%rax)\n\t"      /* arg11 */                   \
         "pushq 80(%%rax)\n\t"      /* arg10 */                   \
         "pushq 72(%%rax)\n\t"      /* arg9 */                    \
         "pushq 64(%%rax)\n\t"      /* arg8 */                    \
         "pushq 56(%%rax)\n\t"      /* arg7 */                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14",  \
                     "r15"                                        \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2084
#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */

/* ------------------------ ppc32-linux ------------------------ */

#if defined(PLAT_ppc32_linux)
2090
/* This is useful for finding out about the on-stack stuff:

   extern int f9  ( int,int,int,int,int,int,int,int,int );
   extern int f10 ( int,int,int,int,int,int,int,int,int,int );
   extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
   extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );

   int g9 ( void ) {
      return f9(11,22,33,44,55,66,77,88,99);
   }
   int g10 ( void ) {
      return f10(11,22,33,44,55,66,77,88,99,110);
   }
   int g11 ( void ) {
      return f11(11,22,33,44,55,66,77,88,99,110,121);
   }
   int g12 ( void ) {
      return f12(11,22,33,44,55,66,77,88,99,110,121,132);
   }
*/

/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */

/* These regs are trashed by the hidden call. */
/* Clobber list used by the CALL_FN_ asm blocks below: link/count/fixed-
   point-exception registers, all condition-register fields, and the
   volatile GPRs. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
   "r11", "r12", "r13"
2120
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */
2125
/* Save r1 (the stack pointer) in r28, then round r1 down to a 16-byte
   boundary (rlwinm clears the low 4 bits).  RESTORE puts the saved
   value back; r28 is listed as trashed in every asm block below. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t"             \
   "rlwinm 1,1,0,0,27\n\t"
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
2131
/* These CALL_FN_ macros assume that on ppc32-linux,
   sizeof(unsigned long) == 4. */
2134
/* Call a 0-arg function through Valgrind's no-redirect path; the
   result is taken from r3 per the ppc32 SVR4 ABI. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2154
/* Call a 1-arg function; arg1 goes in r3, result comes back in r3. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2176
/* Call a 2-arg function; args in r3/r4, result in r3. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2200
/* Call a 3-arg function; args in r3-r5, result in r3. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2226
/* Call a 4-arg function; args in r3-r6, result in r3. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2254
/* Call a 5-arg function; args in r3-r7, result in r3. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2284
/* Call a 6-arg function; args in r3-r8, result in r3. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2316
/* Call a 7-arg function; args in r3-r9, result in r3. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2351
/* Call an 8-arg function; args fill all eight argument registers
   r3-r10, result in r3. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2388
/* Call a 9-arg function.  Args 1-8 in r3-r10; arg9 is stored into the
   stack parameter area at 8(r1) after reserving 16 bytes. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2431
/* Call a 10-arg function.  Args 1-8 in r3-r10; args 9-10 stored into
   the stack parameter area at 8(r1)/12(r1) after reserving 16 bytes. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2478
/* Call an 11-arg function.  Args 1-8 in r3-r10; args 9-11 stored into
   the stack parameter area (8..16 off r1) after reserving 32 bytes. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2529
/* Call a 12-arg function.  Args 1-8 in r3-r10; args 9-12 stored into
   the stack parameter area (8..20 off r1) after reserving 32 bytes. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,     \
                                  arg12)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      _argvec[12] = (unsigned long)arg12;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg12 */                                              \
         "lwz 3,48(11)\n\t"                                       \
         "stw 3,20(1)\n\t"                                        \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2584
#endif /* PLAT_ppc32_linux */

/* ------------------------ ppc64-linux ------------------------ */

#if defined(PLAT_ppc64be_linux)

/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */

/* These regs are trashed by the hidden call. */
/* Clobber list used by the CALL_FN_ asm blocks below.  Unlike the
   ppc32 variant, r2 (the TOC pointer) is not listed here; it is
   saved/restored explicitly by each macro instead. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"
2599
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */
2604
/* Save r1 (the stack pointer) in r28, then round r1 down to a 16-byte
   boundary (rldicr clears the low 4 bits).  RESTORE puts the saved
   value back; r28 is listed as trashed in every asm block below. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t"             \
   "rldicr 1,1,0,59\n\t"
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
2610
/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
   long) == 8. */
2613
/* Call a 0-arg function on ppc64be.  The asm is handed %1 =
   &_argvec[2]; the caller's TOC pointer (r2) is saved at -16(11)
   (i.e. _argvec[0]) across the call, r2 is loaded from -8(11)
   (_orig.r2, the callee's TOC), and the result is taken from r3. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2639
/* Call a 1-arg function on ppc64be; arg1 goes in r3 and the TOC
   pointer is swapped around the call as in CALL_FN_W_v. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2667
/* Call a 2-arg function on ppc64be; args in r3/r4, TOC swapped as in
   CALL_FN_W_v. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2697
/* Call a 3-arg function on ppc64be; args in r3-r5, TOC swapped as in
   CALL_FN_W_v. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2729
/* Call a 4-arg function on ppc64be; args in r3-r6, TOC swapped as in
   CALL_FN_W_v. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2763
/* Call a 5-arg function on ppc64be; args in r3-r7, TOC swapped as in
   CALL_FN_W_v. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2799
/* Call a 6-arg function on ppc64be; args in r3-r8, TOC swapped as in
   CALL_FN_W_v. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2837
/* ppc64be: call the original 7-arg function 'orig', assigning the
   word-sized result (from r3) to 'lval'.  Args 1-7 -> r3-r9. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */                       \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2878
/* ppc64be: call the original 8-arg function 'orig', assigning the
   word-sized result (from r3) to 'lval'.  Args 1-8 -> r3-r10; this
   is the last variant that fits entirely in argument registers. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2921
/* ppc64be: call the original 9-arg function 'orig'.  Args 1-8 go in
   r3-r10; arg9 overflows to the stack at 112(r1) after the frame is
   grown by 128 bytes (ELFv1-style parameter area at sp+112 --
   NOTE(review): offset taken from the code, confirm against the
   64-bit PowerPC ELF ABI supplement).  Result (r3) -> 'lval'. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t"   /* use nraddr's tocptr */            \
         "addi 1,1,-128\n\t" /* expand stack frame */             \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2970
/* ppc64be: call the original 10-arg function 'orig'.  Args 1-8 in
   r3-r10; arg9/arg10 spill to 112(r1)/120(r1) within a 128-byte
   frame extension.  Result (r3) -> 'lval'. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]    = (unsigned long)_orig.r2;                    \
      _argvec[2]    = (unsigned long)_orig.nraddr;                \
      _argvec[2+1]  = (unsigned long)arg1;                        \
      _argvec[2+2]  = (unsigned long)arg2;                        \
      _argvec[2+3]  = (unsigned long)arg3;                        \
      _argvec[2+4]  = (unsigned long)arg4;                        \
      _argvec[2+5]  = (unsigned long)arg5;                        \
      _argvec[2+6]  = (unsigned long)arg6;                        \
      _argvec[2+7]  = (unsigned long)arg7;                        \
      _argvec[2+8]  = (unsigned long)arg8;                        \
      _argvec[2+9]  = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t"   /* use nraddr's tocptr */            \
         "addi 1,1,-128\n\t" /* expand stack frame */             \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3023
/* ppc64be: call the original 11-arg function 'orig'.  Args 1-8 in
   r3-r10; args 9-11 spill to 112/120/128(r1) within a 144-byte
   frame extension.  Result (r3) -> 'lval'. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]    = (unsigned long)_orig.r2;                    \
      _argvec[2]    = (unsigned long)_orig.nraddr;                \
      _argvec[2+1]  = (unsigned long)arg1;                        \
      _argvec[2+2]  = (unsigned long)arg2;                        \
      _argvec[2+3]  = (unsigned long)arg3;                        \
      _argvec[2+4]  = (unsigned long)arg4;                        \
      _argvec[2+5]  = (unsigned long)arg5;                        \
      _argvec[2+6]  = (unsigned long)arg6;                        \
      _argvec[2+7]  = (unsigned long)arg7;                        \
      _argvec[2+8]  = (unsigned long)arg8;                        \
      _argvec[2+9]  = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t"   /* use nraddr's tocptr */            \
         "addi 1,1,-144\n\t" /* expand stack frame */             \
         /* arg11 */                                              \
         "ld 3,88(11)\n\t"                                        \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3080
/* ppc64be: call the original 12-arg function 'orig'.  Args 1-8 in
   r3-r10; args 9-12 spill to 112/120/128/136(r1) within a 144-byte
   frame extension.  Result (r3) -> 'lval'. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]    = (unsigned long)_orig.r2;                    \
      _argvec[2]    = (unsigned long)_orig.nraddr;                \
      _argvec[2+1]  = (unsigned long)arg1;                        \
      _argvec[2+2]  = (unsigned long)arg2;                        \
      _argvec[2+3]  = (unsigned long)arg3;                        \
      _argvec[2+4]  = (unsigned long)arg4;                        \
      _argvec[2+5]  = (unsigned long)arg5;                        \
      _argvec[2+6]  = (unsigned long)arg6;                        \
      _argvec[2+7]  = (unsigned long)arg7;                        \
      _argvec[2+8]  = (unsigned long)arg8;                        \
      _argvec[2+9]  = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t"   /* use nraddr's tocptr */            \
         "addi 1,1,-144\n\t" /* expand stack frame */             \
         /* arg12 */                                              \
         "ld 3,96(11)\n\t"                                        \
         "std 3,136(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld 3,88(11)\n\t"                                        \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3141
3142#endif /* PLAT_ppc64be_linux */
3143
3144/* ------------------------- ppc64le-linux ----------------------- */
3145#if defined(PLAT_ppc64le_linux)
3146
/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */

/* These regs are trashed by the hidden call.
   NOTE(review): this list includes r13, which is the ppc64 thread
   pointer -- it mirrors the clobber lists used by the asm blocks
   below; confirm against upstream before changing it. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save the current sp in r28 (callee-saved, hence also listed as a
   clobber in every asm block that uses this), then round sp down to
   a 16-byte boundary: rldicr with mask bits 0..59 clears the low 4
   bits of r1. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t"             \
   "rldicr 1,1,0,59\n\t"
/* Put back the original sp saved by VALGRIND_ALIGN_STACK. */
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
3166
3167/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
3168 long) == 8. */
3169
/* ppc64le: call the original (non-redirected) 0-arg function 'orig',
   assigning its word-sized result (from r3) to 'lval'.  The target
   address travels in r12 (the ELFv2 entry-point convention --
   NOTE(review): inferred from the R12 branch macro; confirm against
   the ELFv2 ABI).  The caller's TOC (r2) is saved at -16(r12) and
   the callee's TOC (_orig.r2) is installed before the call. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t"   /* use nraddr's tocptr */            \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3195
/* ppc64le: call the original 1-arg function 'orig'; arg1 -> r3,
   result (r3) -> 'lval'.  TOC handling as in CALL_FN_W_v. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3223
/* ppc64le: call the original 2-arg function 'orig'; args -> r3-r4,
   result (r3) -> 'lval'. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3253
/* ppc64le: call the original 3-arg function 'orig'; args -> r3-r5,
   result (r3) -> 'lval'. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3285
/* ppc64le: call the original 4-arg function 'orig'; args -> r3-r6,
   result (r3) -> 'lval'. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */                       \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3319
/* ppc64le: call the original 5-arg function 'orig'; args -> r3-r7,
   result (r3) -> 'lval'. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */                       \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3355
/* ppc64le: call the original 6-arg function 'orig'; args -> r3-r8,
   result (r3) -> 'lval'. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */                       \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3393
/* ppc64le: call the original 7-arg function 'orig'; args -> r3-r9,
   result (r3) -> 'lval'. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(12)\n\t"  /* arg7->r9 */                       \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3434
/* ppc64le: call the original 8-arg function 'orig'; args -> r3-r10
   (the last all-register variant), result (r3) -> 'lval'. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(12)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3477
/* ppc64le: call the original 9-arg function 'orig'.  Args 1-8 in
   r3-r10; arg9 overflows to the stack at 96(r1) after the frame is
   grown by 128 bytes (ELFv2-style parameter area at sp+96 --
   NOTE(review): offset taken from the code, confirm against the
   ELFv2 ABI).  Result (r3) -> 'lval'. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t"   /* use nraddr's tocptr */            \
         "addi 1,1,-128\n\t" /* expand stack frame */             \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(12)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3526
/* ppc64le: call the original 10-arg function 'orig'.  Args 1-8 in
   r3-r10; arg9/arg10 spill to 96(r1)/104(r1) within a 128-byte
   frame extension.  Result (r3) -> 'lval'. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]    = (unsigned long)_orig.r2;                    \
      _argvec[2]    = (unsigned long)_orig.nraddr;                \
      _argvec[2+1]  = (unsigned long)arg1;                        \
      _argvec[2+2]  = (unsigned long)arg2;                        \
      _argvec[2+3]  = (unsigned long)arg3;                        \
      _argvec[2+4]  = (unsigned long)arg4;                        \
      _argvec[2+5]  = (unsigned long)arg5;                        \
      _argvec[2+6]  = (unsigned long)arg6;                        \
      _argvec[2+7]  = (unsigned long)arg7;                        \
      _argvec[2+8]  = (unsigned long)arg8;                        \
      _argvec[2+9]  = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t"   /* use nraddr's tocptr */            \
         "addi 1,1,-128\n\t" /* expand stack frame */             \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(12)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3579
/* ppc64le: call the original 11-arg function 'orig'.  Args 1-8 in
   r3-r10; args 9-11 spill to 96/104/112(r1) within a 144-byte
   frame extension.  Result (r3) -> 'lval'. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]    = (unsigned long)_orig.r2;                    \
      _argvec[2]    = (unsigned long)_orig.nraddr;                \
      _argvec[2+1]  = (unsigned long)arg1;                        \
      _argvec[2+2]  = (unsigned long)arg2;                        \
      _argvec[2+3]  = (unsigned long)arg3;                        \
      _argvec[2+4]  = (unsigned long)arg4;                        \
      _argvec[2+5]  = (unsigned long)arg5;                        \
      _argvec[2+6]  = (unsigned long)arg6;                        \
      _argvec[2+7]  = (unsigned long)arg7;                        \
      _argvec[2+8]  = (unsigned long)arg8;                        \
      _argvec[2+9]  = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t"   /* use nraddr's tocptr */            \
         "addi 1,1,-144\n\t" /* expand stack frame */             \
         /* arg11 */                                              \
         "ld 3,88(12)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(12)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3636
/* ppc64le: call the original 12-arg function 'orig'.  Args 1-8 in
   r3-r10; args 9-12 spill to 96/104/112/120(r1) within a 144-byte
   frame extension.  Result (r3) -> 'lval'. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]    = (unsigned long)_orig.r2;                    \
      _argvec[2]    = (unsigned long)_orig.nraddr;                \
      _argvec[2+1]  = (unsigned long)arg1;                        \
      _argvec[2+2]  = (unsigned long)arg2;                        \
      _argvec[2+3]  = (unsigned long)arg3;                        \
      _argvec[2+4]  = (unsigned long)arg4;                        \
      _argvec[2+5]  = (unsigned long)arg5;                        \
      _argvec[2+6]  = (unsigned long)arg6;                        \
      _argvec[2+7]  = (unsigned long)arg7;                        \
      _argvec[2+8]  = (unsigned long)arg8;                        \
      _argvec[2+9]  = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t"   /* use nraddr's tocptr */            \
         "addi 1,1,-144\n\t" /* expand stack frame */             \
         /* arg12 */                                              \
         "ld 3,96(12)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld 3,88(12)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(12)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3697
3698#endif /* PLAT_ppc64le_linux */
3699
3700/* ------------------------- arm-linux ------------------------- */
3701
3702#if defined(PLAT_arm_linux)
3703
/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* This is a bit tricky.  We store the original stack pointer in r10
   as it is callee-saves.  gcc doesn't allow the use of r11 for some
   reason.  Also, we can't directly "bic" the stack pointer in thumb
   mode since r13 isn't an allowed register number in that context.
   So use r4 as a temporary, since that is about to get trashed
   anyway, just after each use of this macro.  Side effect is we need
   to be very careful about any future changes, since
   VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
#define VALGRIND_ALIGN_STACK                                      \
   "mov r10, sp\n\t"                                              \
   "mov r4, sp\n\t"                                               \
   "bic r4, r4, #7\n\t" /* round sp down to 8-byte boundary */    \
   "mov sp, r4\n\t"
#define VALGRIND_RESTORE_STACK                                    \
   "mov sp, r10\n\t"

/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
   long) == 4. */
3730
/* arm: call the unredirected 0-arg function; word result from r0. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r4, [%1] \n\t" /* target->r4 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3749
/* arm: call with 1 word arg in r0; word result from r0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r4, [%1] \n\t" /* target->r4 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3770
/* arm: call with 2 word args in r0-r1; word result from r0. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r4, [%1] \n\t" /* target->r4 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3793
/* arm: call with 3 word args in r0-r2; word result from r0. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r4, [%1] \n\t" /* target->r4 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3818
/* arm: call with 4 word args in r0-r3 (all register args used);
   word result from r0. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t" /* target->r4 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3845
/* arm: call with 5 word args: r0-r3 plus arg5 pushed on the stack.
   The extra "sub sp, #4" pads the 1-word push to 8 bytes so sp
   stays 8-byte aligned across the call. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t" /* target->r4 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3876
/* arm: call with 6 word args: r0-r3 plus args 5-6 pushed (2 words =
   8 bytes, so no alignment padding needed). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t" /* target->r4 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3908
/* arm: call with 7 word args: r0-r3 plus args 5-7 pushed; the extra
   "sub sp, #4" pads the 3-word push so sp stays 8-byte aligned. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t" /* target->r4 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3944
/* arm: call with 8 word args: r0-r3 plus args 5-8 pushed (4 words =
   16 bytes, alignment preserved without padding). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "push {r0, r1, r2, r3} \n\t"                             \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t" /* target->r4 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3981
/* arm: call with 9 word args: r0-r3 plus args 5-9 pushed; "sub sp,
   #4" pads the 5-word push so sp stays 8-byte aligned.  r4 is used
   as a scratch to stage arg9 before it finally holds the target. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t" /* target->r4 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4021
/* arm: call with 10 word args: r0-r3 plus args 5-10 on the stack.
   arg10 is pushed first (lands highest), then args 5-9, giving the
   required in-order stack layout; 6 pushed words keep alignment. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t" /* target->r4 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4063
/* arm: call with 11 word args: r0-r3 plus args 5-11 on the stack.
   args 10-11 pushed first (highest addresses), then args 5-9; the
   "sub sp, #4" pads the 7-word total to keep 8-byte alignment. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t" /* target->r4 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4109
/* arm: call with 12 word args: r0-r3 plus args 5-12 on the stack.
   args 10-12 pushed first, then args 5-9 (8 pushed words = 32
   bytes, alignment preserved without padding). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "ldr r2, [%1, #48] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t" /* target->r4 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4156
4157#endif /* PLAT_arm_linux */
4158
4159/* ------------------------ arm64-linux ------------------------ */
4160
4161#if defined(PLAT_arm64_linux)
4162
/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9",   \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",      \
     "x18", "x19", "x20", "x30",                                  \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",  \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17",      \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25",      \
     "v26", "v27", "v28", "v29", "v30", "v31"

/* x21 is callee-saved, so we can use it to save and restore SP around
   the hidden call. */
#define VALGRIND_ALIGN_STACK                                      \
   "mov x21, sp\n\t"                                              \
   "bic sp, x21, #15\n\t" /* round sp down to 16-byte boundary */
#define VALGRIND_RESTORE_STACK                                    \
   "mov sp, x21\n\t"

/* These CALL_FN_ macros assume that on arm64-linux,
   sizeof(unsigned long) == 8. */
4183
/* arm64: call the unredirected 0-arg function; result from x0.
   The target address travels in x8 (a scratch reg under AAPCS64). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x8, [%1] \n\t" /* target->x8 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4202
/* arm64: call with 1 word arg in x0; result from x0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x8, [%1] \n\t" /* target->x8 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4223
/* arm64: call with 2 word args in x0-x1; result from x0. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t" /* target->x8 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4246
/* arm64: call with 3 word args in x0-x2; result from x0. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t" /* target->x8 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4271
/* arm64: call with 4 word args in x0-x3; result from x0. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x8, [%1] \n\t" /* target->x8 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4298
/* arm64: call with 5 word args in x0-x4; result from x0. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x8, [%1] \n\t" /* target->x8 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4327
/* arm64: call with 6 word args in x0-x5; result from x0. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x8, [%1] \n\t" /* target->x8 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4358
/* arm64: call with 7 word args in x0-x6; result from x0. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x8, [%1] \n\t" /* target->x8 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4392
/* arm64: call with 8 word args in x0-x7 (all register args used);
   result from x0. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1] \n\t" /* target->x8 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4428
/* arm64: call with 9 word args: x0-x7 plus arg9 stored at [sp,#0]
   in a 32-byte (16-byte-aligned) reserved area.  x8 is used as a
   scratch for the store before it finally carries the target. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1] \n\t" /* target->x8 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4468
/* arm64: call with 10 word args: x0-x7 plus args 9-10 stored at
   [sp,#0] and [sp,#8] in a 32-byte reserved area. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1] \n\t" /* target->x8 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4511
/* arm64: call with 11 word args: x0-x7 plus args 9-11 stored at
   [sp,#0..#16] in a 48-byte (16-byte-aligned) reserved area. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1, #88] \n\t"                                 \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t" /* target->x8 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4557
/* arm64: call with 12 word args: x0-x7 plus args 9-12 stored at
   [sp,#0..#24] in a 48-byte (16-byte-aligned) reserved area. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,                 \
                      arg12)                                      \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1, #88] \n\t"                                 \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1, #96] \n\t"                                 \
         "str x8, [sp, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t" /* target->x8 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4607
4608#endif /* PLAT_arm64_linux */
4609
4610/* ------------------------- s390x-linux ------------------------- */
4611
4612#if defined(PLAT_s390x_linux)
4613
4614/* Similar workaround as amd64 (see above), but we use r11 as frame
4615 pointer and save the old r11 in r7. r11 might be used for
4616 argvec, therefore we copy argvec in r1 since r1 is clobbered
4617 after the call anyway. */
/* CFI bookkeeping for the s390x CALL_FN_* macros below.  When the compiler
   emits DWARF CFI for inline asm, we temporarily switch the CFA to r11 so
   unwinding through the hidden call still works: r1 gets a copy of the
   argvec pointer (r11 might have been holding it), r7 preserves the old
   r11, and __FRAME_POINTER feeds the current CFA in as asm input %2.
   Without CFI support, only the "copy argvec to r1" step remains. */
4618#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
4619# define __FRAME_POINTER \
4620 ,"d"(__builtin_dwarf_cfa())
4621# define VALGRIND_CFI_PROLOGUE \
4622 ".cfi_remember_state\n\t" \
4623 "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
4624 "lgr 7,11\n\t" \
4625 "lgr 11,%2\n\t" \
4626 ".cfi_def_cfa r11, 0\n\t"
4627# define VALGRIND_CFI_EPILOGUE \
4628 "lgr 11, 7\n\t" \
4629 ".cfi_restore_state\n\t"
4630#else
4631# define __FRAME_POINTER
4632# define VALGRIND_CFI_PROLOGUE \
4633 "lgr 1,%1\n\t"
4634# define VALGRIND_CFI_EPILOGUE
4635#endif
4636
4637/* Nb: On s390 the stack pointer is properly aligned *at all times*
4638 according to the s390 GCC maintainer. (The ABI specification is not
4639 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4640 VALGRIND_RESTORE_STACK are not defined here. */
4641
4642/* These regs are trashed by the hidden call. Note that we overwrite
4643 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4644 function a proper return address. All others are ABI defined call
4645 clobbers. */
/* r2-r5 carry return value / arguments; r0, r1, r14 and f0-f7 are
   call-clobbered.  r6 is call-saved in the s390x ABI but is added
   explicitly in the >=5-arg macros below where it carries arg5. */
4646#define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
4647 "f0","f1","f2","f3","f4","f5","f6","f7"
4657
/* Call a 0-argument function with redirection suppressed (s390x-linux).
   _argvec[0] holds the target address; the prologue copies the argvec
   pointer to r1, "lg 1,0(1)" then loads the target into r1 for
   VALGRIND_CALL_NOREDIR_R1.  "aghi 15,-160" allocates the 160-byte
   register save area the callee expects (s390x ELF ABI); the result
   is returned in r2.  r7 is clobbered by VALGRIND_CFI_PROLOGUE.
   NOTE(review): the input constraint here is "d" while every other
   macro in this section uses "a" — both are GPR classes ("a" just
   excludes r0), but confirm the inconsistency is intentional. */
4658#define CALL_FN_W_v(lval, orig) \
4659 do { \
4660 volatile OrigFn _orig = (orig); \
4661 volatile unsigned long _argvec[1]; \
4662 volatile unsigned long _res; \
4663 _argvec[0] = (unsigned long)_orig.nraddr; \
4664 __asm__ volatile( \
4665 VALGRIND_CFI_PROLOGUE \
4666 "aghi 15,-160\n\t" \
4667 "lg 1, 0(1)\n\t" /* target->r1 */ \
4668 VALGRIND_CALL_NOREDIR_R1 \
4669 "lgr %0, 2\n\t" \
4670 "aghi 15,160\n\t" \
4671 VALGRIND_CFI_EPILOGUE \
4672 : /*out*/ "=d" (_res) \
4673 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
4674 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4675 ); \
4676 lval = (__typeof__(lval)) _res; \
4677 } while (0)
4678
4679/* The call abi has the arguments in r2-r6 and stack */
/* s390x 1- to 5-argument variants.  Common scheme: the prologue copies
   the argvec pointer to r1; args are loaded from 8(1),16(1),... into
   r2-r6 per the call ABI; "lg 1,0(1)" is done LAST (it destroys the
   argvec pointer) to fetch the target into r1; "aghi 15,-160" makes the
   callee's register save area; result comes back in r2.  r7 is trashed
   by VALGRIND_CFI_PROLOGUE; the 5-arg variant additionally clobbers r6
   (call-saved in the ABI, but used here for arg5). */
4680#define CALL_FN_W_W(lval, orig, arg1) \
4681 do { \
4682 volatile OrigFn _orig = (orig); \
4683 volatile unsigned long _argvec[2]; \
4684 volatile unsigned long _res; \
4685 _argvec[0] = (unsigned long)_orig.nraddr; \
4686 _argvec[1] = (unsigned long)arg1; \
4687 __asm__ volatile( \
4688 VALGRIND_CFI_PROLOGUE \
4689 "aghi 15,-160\n\t" \
4690 "lg 2, 8(1)\n\t" \
4691 "lg 1, 0(1)\n\t" \
4692 VALGRIND_CALL_NOREDIR_R1 \
4693 "lgr %0, 2\n\t" \
4694 "aghi 15,160\n\t" \
4695 VALGRIND_CFI_EPILOGUE \
4696 : /*out*/ "=d" (_res) \
4697 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4698 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4699 ); \
4700 lval = (__typeof__(lval)) _res; \
4701 } while (0)
4702
4703#define CALL_FN_W_WW(lval, orig, arg1, arg2) \
4704 do { \
4705 volatile OrigFn _orig = (orig); \
4706 volatile unsigned long _argvec[3]; \
4707 volatile unsigned long _res; \
4708 _argvec[0] = (unsigned long)_orig.nraddr; \
4709 _argvec[1] = (unsigned long)arg1; \
4710 _argvec[2] = (unsigned long)arg2; \
4711 __asm__ volatile( \
4712 VALGRIND_CFI_PROLOGUE \
4713 "aghi 15,-160\n\t" \
4714 "lg 2, 8(1)\n\t" \
4715 "lg 3,16(1)\n\t" \
4716 "lg 1, 0(1)\n\t" \
4717 VALGRIND_CALL_NOREDIR_R1 \
4718 "lgr %0, 2\n\t" \
4719 "aghi 15,160\n\t" \
4720 VALGRIND_CFI_EPILOGUE \
4721 : /*out*/ "=d" (_res) \
4722 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4723 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4724 ); \
4725 lval = (__typeof__(lval)) _res; \
4726 } while (0)
4727
4728#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
4729 do { \
4730 volatile OrigFn _orig = (orig); \
4731 volatile unsigned long _argvec[4]; \
4732 volatile unsigned long _res; \
4733 _argvec[0] = (unsigned long)_orig.nraddr; \
4734 _argvec[1] = (unsigned long)arg1; \
4735 _argvec[2] = (unsigned long)arg2; \
4736 _argvec[3] = (unsigned long)arg3; \
4737 __asm__ volatile( \
4738 VALGRIND_CFI_PROLOGUE \
4739 "aghi 15,-160\n\t" \
4740 "lg 2, 8(1)\n\t" \
4741 "lg 3,16(1)\n\t" \
4742 "lg 4,24(1)\n\t" \
4743 "lg 1, 0(1)\n\t" \
4744 VALGRIND_CALL_NOREDIR_R1 \
4745 "lgr %0, 2\n\t" \
4746 "aghi 15,160\n\t" \
4747 VALGRIND_CFI_EPILOGUE \
4748 : /*out*/ "=d" (_res) \
4749 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4750 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4751 ); \
4752 lval = (__typeof__(lval)) _res; \
4753 } while (0)
4754
4755#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
4756 do { \
4757 volatile OrigFn _orig = (orig); \
4758 volatile unsigned long _argvec[5]; \
4759 volatile unsigned long _res; \
4760 _argvec[0] = (unsigned long)_orig.nraddr; \
4761 _argvec[1] = (unsigned long)arg1; \
4762 _argvec[2] = (unsigned long)arg2; \
4763 _argvec[3] = (unsigned long)arg3; \
4764 _argvec[4] = (unsigned long)arg4; \
4765 __asm__ volatile( \
4766 VALGRIND_CFI_PROLOGUE \
4767 "aghi 15,-160\n\t" \
4768 "lg 2, 8(1)\n\t" \
4769 "lg 3,16(1)\n\t" \
4770 "lg 4,24(1)\n\t" \
4771 "lg 5,32(1)\n\t" \
4772 "lg 1, 0(1)\n\t" \
4773 VALGRIND_CALL_NOREDIR_R1 \
4774 "lgr %0, 2\n\t" \
4775 "aghi 15,160\n\t" \
4776 VALGRIND_CFI_EPILOGUE \
4777 : /*out*/ "=d" (_res) \
4778 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4779 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4780 ); \
4781 lval = (__typeof__(lval)) _res; \
4782 } while (0)
4783
4784#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
4785 do { \
4786 volatile OrigFn _orig = (orig); \
4787 volatile unsigned long _argvec[6]; \
4788 volatile unsigned long _res; \
4789 _argvec[0] = (unsigned long)_orig.nraddr; \
4790 _argvec[1] = (unsigned long)arg1; \
4791 _argvec[2] = (unsigned long)arg2; \
4792 _argvec[3] = (unsigned long)arg3; \
4793 _argvec[4] = (unsigned long)arg4; \
4794 _argvec[5] = (unsigned long)arg5; \
4795 __asm__ volatile( \
4796 VALGRIND_CFI_PROLOGUE \
4797 "aghi 15,-160\n\t" \
4798 "lg 2, 8(1)\n\t" \
4799 "lg 3,16(1)\n\t" \
4800 "lg 4,24(1)\n\t" \
4801 "lg 5,32(1)\n\t" \
4802 "lg 6,40(1)\n\t" \
4803 "lg 1, 0(1)\n\t" \
4804 VALGRIND_CALL_NOREDIR_R1 \
4805 "lgr %0, 2\n\t" \
4806 "aghi 15,160\n\t" \
4807 VALGRIND_CFI_EPILOGUE \
4808 : /*out*/ "=d" (_res) \
4809 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4810 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4811 ); \
4812 lval = (__typeof__(lval)) _res; \
4813 } while (0)
4814
/* s390x 6- to 12-argument variants.  Args 1-5 go in r2-r6 as above;
   each further argument is copied with "mvc" from the argvec straight
   into the callee's stack parameter area, which starts at offset 160
   above the new stack pointer — hence the frame grows by 8 bytes per
   extra argument (168, 176, ... 216) and each epilogue adds the same
   amount back.  As before, the target is loaded into r1 last, the
   result is read from r2, and r6/r7 are declared clobbered. */
4815#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4816 arg6) \
4817 do { \
4818 volatile OrigFn _orig = (orig); \
4819 volatile unsigned long _argvec[7]; \
4820 volatile unsigned long _res; \
4821 _argvec[0] = (unsigned long)_orig.nraddr; \
4822 _argvec[1] = (unsigned long)arg1; \
4823 _argvec[2] = (unsigned long)arg2; \
4824 _argvec[3] = (unsigned long)arg3; \
4825 _argvec[4] = (unsigned long)arg4; \
4826 _argvec[5] = (unsigned long)arg5; \
4827 _argvec[6] = (unsigned long)arg6; \
4828 __asm__ volatile( \
4829 VALGRIND_CFI_PROLOGUE \
4830 "aghi 15,-168\n\t" \
4831 "lg 2, 8(1)\n\t" \
4832 "lg 3,16(1)\n\t" \
4833 "lg 4,24(1)\n\t" \
4834 "lg 5,32(1)\n\t" \
4835 "lg 6,40(1)\n\t" \
4836 "mvc 160(8,15), 48(1)\n\t" \
4837 "lg 1, 0(1)\n\t" \
4838 VALGRIND_CALL_NOREDIR_R1 \
4839 "lgr %0, 2\n\t" \
4840 "aghi 15,168\n\t" \
4841 VALGRIND_CFI_EPILOGUE \
4842 : /*out*/ "=d" (_res) \
4843 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4844 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4845 ); \
4846 lval = (__typeof__(lval)) _res; \
4847 } while (0)
4848
4849#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4850 arg6, arg7) \
4851 do { \
4852 volatile OrigFn _orig = (orig); \
4853 volatile unsigned long _argvec[8]; \
4854 volatile unsigned long _res; \
4855 _argvec[0] = (unsigned long)_orig.nraddr; \
4856 _argvec[1] = (unsigned long)arg1; \
4857 _argvec[2] = (unsigned long)arg2; \
4858 _argvec[3] = (unsigned long)arg3; \
4859 _argvec[4] = (unsigned long)arg4; \
4860 _argvec[5] = (unsigned long)arg5; \
4861 _argvec[6] = (unsigned long)arg6; \
4862 _argvec[7] = (unsigned long)arg7; \
4863 __asm__ volatile( \
4864 VALGRIND_CFI_PROLOGUE \
4865 "aghi 15,-176\n\t" \
4866 "lg 2, 8(1)\n\t" \
4867 "lg 3,16(1)\n\t" \
4868 "lg 4,24(1)\n\t" \
4869 "lg 5,32(1)\n\t" \
4870 "lg 6,40(1)\n\t" \
4871 "mvc 160(8,15), 48(1)\n\t" \
4872 "mvc 168(8,15), 56(1)\n\t" \
4873 "lg 1, 0(1)\n\t" \
4874 VALGRIND_CALL_NOREDIR_R1 \
4875 "lgr %0, 2\n\t" \
4876 "aghi 15,176\n\t" \
4877 VALGRIND_CFI_EPILOGUE \
4878 : /*out*/ "=d" (_res) \
4879 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4880 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4881 ); \
4882 lval = (__typeof__(lval)) _res; \
4883 } while (0)
4884
4885#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4886 arg6, arg7 ,arg8) \
4887 do { \
4888 volatile OrigFn _orig = (orig); \
4889 volatile unsigned long _argvec[9]; \
4890 volatile unsigned long _res; \
4891 _argvec[0] = (unsigned long)_orig.nraddr; \
4892 _argvec[1] = (unsigned long)arg1; \
4893 _argvec[2] = (unsigned long)arg2; \
4894 _argvec[3] = (unsigned long)arg3; \
4895 _argvec[4] = (unsigned long)arg4; \
4896 _argvec[5] = (unsigned long)arg5; \
4897 _argvec[6] = (unsigned long)arg6; \
4898 _argvec[7] = (unsigned long)arg7; \
4899 _argvec[8] = (unsigned long)arg8; \
4900 __asm__ volatile( \
4901 VALGRIND_CFI_PROLOGUE \
4902 "aghi 15,-184\n\t" \
4903 "lg 2, 8(1)\n\t" \
4904 "lg 3,16(1)\n\t" \
4905 "lg 4,24(1)\n\t" \
4906 "lg 5,32(1)\n\t" \
4907 "lg 6,40(1)\n\t" \
4908 "mvc 160(8,15), 48(1)\n\t" \
4909 "mvc 168(8,15), 56(1)\n\t" \
4910 "mvc 176(8,15), 64(1)\n\t" \
4911 "lg 1, 0(1)\n\t" \
4912 VALGRIND_CALL_NOREDIR_R1 \
4913 "lgr %0, 2\n\t" \
4914 "aghi 15,184\n\t" \
4915 VALGRIND_CFI_EPILOGUE \
4916 : /*out*/ "=d" (_res) \
4917 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4918 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4919 ); \
4920 lval = (__typeof__(lval)) _res; \
4921 } while (0)
4922
4923#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4924 arg6, arg7 ,arg8, arg9) \
4925 do { \
4926 volatile OrigFn _orig = (orig); \
4927 volatile unsigned long _argvec[10]; \
4928 volatile unsigned long _res; \
4929 _argvec[0] = (unsigned long)_orig.nraddr; \
4930 _argvec[1] = (unsigned long)arg1; \
4931 _argvec[2] = (unsigned long)arg2; \
4932 _argvec[3] = (unsigned long)arg3; \
4933 _argvec[4] = (unsigned long)arg4; \
4934 _argvec[5] = (unsigned long)arg5; \
4935 _argvec[6] = (unsigned long)arg6; \
4936 _argvec[7] = (unsigned long)arg7; \
4937 _argvec[8] = (unsigned long)arg8; \
4938 _argvec[9] = (unsigned long)arg9; \
4939 __asm__ volatile( \
4940 VALGRIND_CFI_PROLOGUE \
4941 "aghi 15,-192\n\t" \
4942 "lg 2, 8(1)\n\t" \
4943 "lg 3,16(1)\n\t" \
4944 "lg 4,24(1)\n\t" \
4945 "lg 5,32(1)\n\t" \
4946 "lg 6,40(1)\n\t" \
4947 "mvc 160(8,15), 48(1)\n\t" \
4948 "mvc 168(8,15), 56(1)\n\t" \
4949 "mvc 176(8,15), 64(1)\n\t" \
4950 "mvc 184(8,15), 72(1)\n\t" \
4951 "lg 1, 0(1)\n\t" \
4952 VALGRIND_CALL_NOREDIR_R1 \
4953 "lgr %0, 2\n\t" \
4954 "aghi 15,192\n\t" \
4955 VALGRIND_CFI_EPILOGUE \
4956 : /*out*/ "=d" (_res) \
4957 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4958 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4959 ); \
4960 lval = (__typeof__(lval)) _res; \
4961 } while (0)
4962
4963#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4964 arg6, arg7 ,arg8, arg9, arg10) \
4965 do { \
4966 volatile OrigFn _orig = (orig); \
4967 volatile unsigned long _argvec[11]; \
4968 volatile unsigned long _res; \
4969 _argvec[0] = (unsigned long)_orig.nraddr; \
4970 _argvec[1] = (unsigned long)arg1; \
4971 _argvec[2] = (unsigned long)arg2; \
4972 _argvec[3] = (unsigned long)arg3; \
4973 _argvec[4] = (unsigned long)arg4; \
4974 _argvec[5] = (unsigned long)arg5; \
4975 _argvec[6] = (unsigned long)arg6; \
4976 _argvec[7] = (unsigned long)arg7; \
4977 _argvec[8] = (unsigned long)arg8; \
4978 _argvec[9] = (unsigned long)arg9; \
4979 _argvec[10] = (unsigned long)arg10; \
4980 __asm__ volatile( \
4981 VALGRIND_CFI_PROLOGUE \
4982 "aghi 15,-200\n\t" \
4983 "lg 2, 8(1)\n\t" \
4984 "lg 3,16(1)\n\t" \
4985 "lg 4,24(1)\n\t" \
4986 "lg 5,32(1)\n\t" \
4987 "lg 6,40(1)\n\t" \
4988 "mvc 160(8,15), 48(1)\n\t" \
4989 "mvc 168(8,15), 56(1)\n\t" \
4990 "mvc 176(8,15), 64(1)\n\t" \
4991 "mvc 184(8,15), 72(1)\n\t" \
4992 "mvc 192(8,15), 80(1)\n\t" \
4993 "lg 1, 0(1)\n\t" \
4994 VALGRIND_CALL_NOREDIR_R1 \
4995 "lgr %0, 2\n\t" \
4996 "aghi 15,200\n\t" \
4997 VALGRIND_CFI_EPILOGUE \
4998 : /*out*/ "=d" (_res) \
4999 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5000 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5001 ); \
5002 lval = (__typeof__(lval)) _res; \
5003 } while (0)
5004
5005#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5006 arg6, arg7 ,arg8, arg9, arg10, arg11) \
5007 do { \
5008 volatile OrigFn _orig = (orig); \
5009 volatile unsigned long _argvec[12]; \
5010 volatile unsigned long _res; \
5011 _argvec[0] = (unsigned long)_orig.nraddr; \
5012 _argvec[1] = (unsigned long)arg1; \
5013 _argvec[2] = (unsigned long)arg2; \
5014 _argvec[3] = (unsigned long)arg3; \
5015 _argvec[4] = (unsigned long)arg4; \
5016 _argvec[5] = (unsigned long)arg5; \
5017 _argvec[6] = (unsigned long)arg6; \
5018 _argvec[7] = (unsigned long)arg7; \
5019 _argvec[8] = (unsigned long)arg8; \
5020 _argvec[9] = (unsigned long)arg9; \
5021 _argvec[10] = (unsigned long)arg10; \
5022 _argvec[11] = (unsigned long)arg11; \
5023 __asm__ volatile( \
5024 VALGRIND_CFI_PROLOGUE \
5025 "aghi 15,-208\n\t" \
5026 "lg 2, 8(1)\n\t" \
5027 "lg 3,16(1)\n\t" \
5028 "lg 4,24(1)\n\t" \
5029 "lg 5,32(1)\n\t" \
5030 "lg 6,40(1)\n\t" \
5031 "mvc 160(8,15), 48(1)\n\t" \
5032 "mvc 168(8,15), 56(1)\n\t" \
5033 "mvc 176(8,15), 64(1)\n\t" \
5034 "mvc 184(8,15), 72(1)\n\t" \
5035 "mvc 192(8,15), 80(1)\n\t" \
5036 "mvc 200(8,15), 88(1)\n\t" \
5037 "lg 1, 0(1)\n\t" \
5038 VALGRIND_CALL_NOREDIR_R1 \
5039 "lgr %0, 2\n\t" \
5040 "aghi 15,208\n\t" \
5041 VALGRIND_CFI_EPILOGUE \
5042 : /*out*/ "=d" (_res) \
5043 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5044 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5045 ); \
5046 lval = (__typeof__(lval)) _res; \
5047 } while (0)
5048
5049#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5050 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
5051 do { \
5052 volatile OrigFn _orig = (orig); \
5053 volatile unsigned long _argvec[13]; \
5054 volatile unsigned long _res; \
5055 _argvec[0] = (unsigned long)_orig.nraddr; \
5056 _argvec[1] = (unsigned long)arg1; \
5057 _argvec[2] = (unsigned long)arg2; \
5058 _argvec[3] = (unsigned long)arg3; \
5059 _argvec[4] = (unsigned long)arg4; \
5060 _argvec[5] = (unsigned long)arg5; \
5061 _argvec[6] = (unsigned long)arg6; \
5062 _argvec[7] = (unsigned long)arg7; \
5063 _argvec[8] = (unsigned long)arg8; \
5064 _argvec[9] = (unsigned long)arg9; \
5065 _argvec[10] = (unsigned long)arg10; \
5066 _argvec[11] = (unsigned long)arg11; \
5067 _argvec[12] = (unsigned long)arg12; \
5068 __asm__ volatile( \
5069 VALGRIND_CFI_PROLOGUE \
5070 "aghi 15,-216\n\t" \
5071 "lg 2, 8(1)\n\t" \
5072 "lg 3,16(1)\n\t" \
5073 "lg 4,24(1)\n\t" \
5074 "lg 5,32(1)\n\t" \
5075 "lg 6,40(1)\n\t" \
5076 "mvc 160(8,15), 48(1)\n\t" \
5077 "mvc 168(8,15), 56(1)\n\t" \
5078 "mvc 176(8,15), 64(1)\n\t" \
5079 "mvc 184(8,15), 72(1)\n\t" \
5080 "mvc 192(8,15), 80(1)\n\t" \
5081 "mvc 200(8,15), 88(1)\n\t" \
5082 "mvc 208(8,15), 96(1)\n\t" \
5083 "lg 1, 0(1)\n\t" \
5084 VALGRIND_CALL_NOREDIR_R1 \
5085 "lgr %0, 2\n\t" \
5086 "aghi 15,216\n\t" \
5087 VALGRIND_CFI_EPILOGUE \
5088 : /*out*/ "=d" (_res) \
5089 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5090 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5091 ); \
5092 lval = (__typeof__(lval)) _res; \
5093 } while (0)
5094
5095
5096#endif /* PLAT_s390x_linux */
5097
5098/* ------------------------- mips32-linux ----------------------- */
5099
5100#if defined(PLAT_mips32_linux)
5101
5102/* These regs are trashed by the hidden call. */
/* mips32 call-clobbered set: $2-$3 (return), $4-$15 (args + temps),
   $24-$25 (temps; $25 is t9, the o32 call-target register), and
   $31 (ra).  The CALL_FN_* macros below save/restore $28 (gp) and
   $31 themselves around the hidden call. */
5103#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5104"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5105"$25", "$31"
5106
5107/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5108 long) == 4. */
5109
/* mips32 (o32) 0- to 5-argument variants.  Common scheme: push an
   8-byte frame to save $28 (gp) and $31 (ra), then reserve an outgoing
   argument area (the o32 ABI requires at least 16 bytes even for
   register args); args 1-4 are loaded into $4-$7; the 5th arg is
   stored at 16($sp).  The target is loaded into $25 (t9) — o32 PIC
   callees expect their own address there — then called via
   VALGRIND_CALL_NOREDIR_T9.  The result is read from $2, and
   gp/ra are restored before the frame is released. */
5110#define CALL_FN_W_v(lval, orig) \
5111 do { \
5112 volatile OrigFn _orig = (orig); \
5113 volatile unsigned long _argvec[1]; \
5114 volatile unsigned long _res; \
5115 _argvec[0] = (unsigned long)_orig.nraddr; \
5116 __asm__ volatile( \
5117 "subu $29, $29, 8 \n\t" \
5118 "sw $28, 0($29) \n\t" \
5119 "sw $31, 4($29) \n\t" \
5120 "subu $29, $29, 16 \n\t" \
5121 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5122 VALGRIND_CALL_NOREDIR_T9 \
5123 "addu $29, $29, 16\n\t" \
5124 "lw $28, 0($29) \n\t" \
5125 "lw $31, 4($29) \n\t" \
5126 "addu $29, $29, 8 \n\t" \
5127 "move %0, $2\n" \
5128 : /*out*/ "=r" (_res) \
5129 : /*in*/ "0" (&_argvec[0]) \
5130 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5131 ); \
5132 lval = (__typeof__(lval)) _res; \
5133 } while (0)
5134
5135#define CALL_FN_W_W(lval, orig, arg1) \
5136 do { \
5137 volatile OrigFn _orig = (orig); \
5138 volatile unsigned long _argvec[2]; \
5139 volatile unsigned long _res; \
5140 _argvec[0] = (unsigned long)_orig.nraddr; \
5141 _argvec[1] = (unsigned long)(arg1); \
5142 __asm__ volatile( \
5143 "subu $29, $29, 8 \n\t" \
5144 "sw $28, 0($29) \n\t" \
5145 "sw $31, 4($29) \n\t" \
5146 "subu $29, $29, 16 \n\t" \
5147 "lw $4, 4(%1) \n\t" /* arg1*/ \
5148 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5149 VALGRIND_CALL_NOREDIR_T9 \
5150 "addu $29, $29, 16 \n\t" \
5151 "lw $28, 0($29) \n\t" \
5152 "lw $31, 4($29) \n\t" \
5153 "addu $29, $29, 8 \n\t" \
5154 "move %0, $2\n" \
5155 : /*out*/ "=r" (_res) \
5156 : /*in*/ "0" (&_argvec[0]) \
5157 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5158 ); \
5159 lval = (__typeof__(lval)) _res; \
5160 } while (0)
5161
5162#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5163 do { \
5164 volatile OrigFn _orig = (orig); \
5165 volatile unsigned long _argvec[3]; \
5166 volatile unsigned long _res; \
5167 _argvec[0] = (unsigned long)_orig.nraddr; \
5168 _argvec[1] = (unsigned long)(arg1); \
5169 _argvec[2] = (unsigned long)(arg2); \
5170 __asm__ volatile( \
5171 "subu $29, $29, 8 \n\t" \
5172 "sw $28, 0($29) \n\t" \
5173 "sw $31, 4($29) \n\t" \
5174 "subu $29, $29, 16 \n\t" \
5175 "lw $4, 4(%1) \n\t" \
5176 "lw $5, 8(%1) \n\t" \
5177 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5178 VALGRIND_CALL_NOREDIR_T9 \
5179 "addu $29, $29, 16 \n\t" \
5180 "lw $28, 0($29) \n\t" \
5181 "lw $31, 4($29) \n\t" \
5182 "addu $29, $29, 8 \n\t" \
5183 "move %0, $2\n" \
5184 : /*out*/ "=r" (_res) \
5185 : /*in*/ "0" (&_argvec[0]) \
5186 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5187 ); \
5188 lval = (__typeof__(lval)) _res; \
5189 } while (0)
5190
5191#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5192 do { \
5193 volatile OrigFn _orig = (orig); \
5194 volatile unsigned long _argvec[4]; \
5195 volatile unsigned long _res; \
5196 _argvec[0] = (unsigned long)_orig.nraddr; \
5197 _argvec[1] = (unsigned long)(arg1); \
5198 _argvec[2] = (unsigned long)(arg2); \
5199 _argvec[3] = (unsigned long)(arg3); \
5200 __asm__ volatile( \
5201 "subu $29, $29, 8 \n\t" \
5202 "sw $28, 0($29) \n\t" \
5203 "sw $31, 4($29) \n\t" \
5204 "subu $29, $29, 16 \n\t" \
5205 "lw $4, 4(%1) \n\t" \
5206 "lw $5, 8(%1) \n\t" \
5207 "lw $6, 12(%1) \n\t" \
5208 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5209 VALGRIND_CALL_NOREDIR_T9 \
5210 "addu $29, $29, 16 \n\t" \
5211 "lw $28, 0($29) \n\t" \
5212 "lw $31, 4($29) \n\t" \
5213 "addu $29, $29, 8 \n\t" \
5214 "move %0, $2\n" \
5215 : /*out*/ "=r" (_res) \
5216 : /*in*/ "0" (&_argvec[0]) \
5217 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5218 ); \
5219 lval = (__typeof__(lval)) _res; \
5220 } while (0)
5221
5222#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5223 do { \
5224 volatile OrigFn _orig = (orig); \
5225 volatile unsigned long _argvec[5]; \
5226 volatile unsigned long _res; \
5227 _argvec[0] = (unsigned long)_orig.nraddr; \
5228 _argvec[1] = (unsigned long)(arg1); \
5229 _argvec[2] = (unsigned long)(arg2); \
5230 _argvec[3] = (unsigned long)(arg3); \
5231 _argvec[4] = (unsigned long)(arg4); \
5232 __asm__ volatile( \
5233 "subu $29, $29, 8 \n\t" \
5234 "sw $28, 0($29) \n\t" \
5235 "sw $31, 4($29) \n\t" \
5236 "subu $29, $29, 16 \n\t" \
5237 "lw $4, 4(%1) \n\t" \
5238 "lw $5, 8(%1) \n\t" \
5239 "lw $6, 12(%1) \n\t" \
5240 "lw $7, 16(%1) \n\t" \
5241 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5242 VALGRIND_CALL_NOREDIR_T9 \
5243 "addu $29, $29, 16 \n\t" \
5244 "lw $28, 0($29) \n\t" \
5245 "lw $31, 4($29) \n\t" \
5246 "addu $29, $29, 8 \n\t" \
5247 "move %0, $2\n" \
5248 : /*out*/ "=r" (_res) \
5249 : /*in*/ "0" (&_argvec[0]) \
5250 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5251 ); \
5252 lval = (__typeof__(lval)) _res; \
5253 } while (0)
5254
5255#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5256 do { \
5257 volatile OrigFn _orig = (orig); \
5258 volatile unsigned long _argvec[6]; \
5259 volatile unsigned long _res; \
5260 _argvec[0] = (unsigned long)_orig.nraddr; \
5261 _argvec[1] = (unsigned long)(arg1); \
5262 _argvec[2] = (unsigned long)(arg2); \
5263 _argvec[3] = (unsigned long)(arg3); \
5264 _argvec[4] = (unsigned long)(arg4); \
5265 _argvec[5] = (unsigned long)(arg5); \
5266 __asm__ volatile( \
5267 "subu $29, $29, 8 \n\t" \
5268 "sw $28, 0($29) \n\t" \
5269 "sw $31, 4($29) \n\t" \
5270 "lw $4, 20(%1) \n\t" \
5271 "subu $29, $29, 24\n\t" \
5272 "sw $4, 16($29) \n\t" \
5273 "lw $4, 4(%1) \n\t" \
5274 "lw $5, 8(%1) \n\t" \
5275 "lw $6, 12(%1) \n\t" \
5276 "lw $7, 16(%1) \n\t" \
5277 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5278 VALGRIND_CALL_NOREDIR_T9 \
5279 "addu $29, $29, 24 \n\t" \
5280 "lw $28, 0($29) \n\t" \
5281 "lw $31, 4($29) \n\t" \
5282 "addu $29, $29, 8 \n\t" \
5283 "move %0, $2\n" \
5284 : /*out*/ "=r" (_res) \
5285 : /*in*/ "0" (&_argvec[0]) \
5286 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5287 ); \
5288 lval = (__typeof__(lval)) _res; \
5289 } while (0)
/* mips32 (o32) 6- to 12-argument variants.  Args 5..N are staged
   through $4 and stored into the outgoing area at 16($sp), 20($sp), ...
   before $4-$7 are (re)loaded with args 1-4; the frame sizes (24, 32,
   40, 48, 56) keep 8-byte alignment.  As in the smaller variants, gp
   and ra are saved in a separate 8-byte frame and restored afterwards,
   the target goes via $25 (t9), and the result is read from $2.
   NOTE(review): the lone "nop" in the 6-arg variant appears only there
   — looks vestigial (load-delay-slot style); confirm against upstream.
   NOTE(review): the 12-arg variant uses the "r" input constraint while
   every other macro in this section ties the input with "0"; both
   should assemble, but confirm the divergence is intentional. */
5290#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5291 do { \
5292 volatile OrigFn _orig = (orig); \
5293 volatile unsigned long _argvec[7]; \
5294 volatile unsigned long _res; \
5295 _argvec[0] = (unsigned long)_orig.nraddr; \
5296 _argvec[1] = (unsigned long)(arg1); \
5297 _argvec[2] = (unsigned long)(arg2); \
5298 _argvec[3] = (unsigned long)(arg3); \
5299 _argvec[4] = (unsigned long)(arg4); \
5300 _argvec[5] = (unsigned long)(arg5); \
5301 _argvec[6] = (unsigned long)(arg6); \
5302 __asm__ volatile( \
5303 "subu $29, $29, 8 \n\t" \
5304 "sw $28, 0($29) \n\t" \
5305 "sw $31, 4($29) \n\t" \
5306 "lw $4, 20(%1) \n\t" \
5307 "subu $29, $29, 32\n\t" \
5308 "sw $4, 16($29) \n\t" \
5309 "lw $4, 24(%1) \n\t" \
5310 "nop\n\t" \
5311 "sw $4, 20($29) \n\t" \
5312 "lw $4, 4(%1) \n\t" \
5313 "lw $5, 8(%1) \n\t" \
5314 "lw $6, 12(%1) \n\t" \
5315 "lw $7, 16(%1) \n\t" \
5316 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5317 VALGRIND_CALL_NOREDIR_T9 \
5318 "addu $29, $29, 32 \n\t" \
5319 "lw $28, 0($29) \n\t" \
5320 "lw $31, 4($29) \n\t" \
5321 "addu $29, $29, 8 \n\t" \
5322 "move %0, $2\n" \
5323 : /*out*/ "=r" (_res) \
5324 : /*in*/ "0" (&_argvec[0]) \
5325 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5326 ); \
5327 lval = (__typeof__(lval)) _res; \
5328 } while (0)
5329
5330#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5331 arg7) \
5332 do { \
5333 volatile OrigFn _orig = (orig); \
5334 volatile unsigned long _argvec[8]; \
5335 volatile unsigned long _res; \
5336 _argvec[0] = (unsigned long)_orig.nraddr; \
5337 _argvec[1] = (unsigned long)(arg1); \
5338 _argvec[2] = (unsigned long)(arg2); \
5339 _argvec[3] = (unsigned long)(arg3); \
5340 _argvec[4] = (unsigned long)(arg4); \
5341 _argvec[5] = (unsigned long)(arg5); \
5342 _argvec[6] = (unsigned long)(arg6); \
5343 _argvec[7] = (unsigned long)(arg7); \
5344 __asm__ volatile( \
5345 "subu $29, $29, 8 \n\t" \
5346 "sw $28, 0($29) \n\t" \
5347 "sw $31, 4($29) \n\t" \
5348 "lw $4, 20(%1) \n\t" \
5349 "subu $29, $29, 32\n\t" \
5350 "sw $4, 16($29) \n\t" \
5351 "lw $4, 24(%1) \n\t" \
5352 "sw $4, 20($29) \n\t" \
5353 "lw $4, 28(%1) \n\t" \
5354 "sw $4, 24($29) \n\t" \
5355 "lw $4, 4(%1) \n\t" \
5356 "lw $5, 8(%1) \n\t" \
5357 "lw $6, 12(%1) \n\t" \
5358 "lw $7, 16(%1) \n\t" \
5359 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5360 VALGRIND_CALL_NOREDIR_T9 \
5361 "addu $29, $29, 32 \n\t" \
5362 "lw $28, 0($29) \n\t" \
5363 "lw $31, 4($29) \n\t" \
5364 "addu $29, $29, 8 \n\t" \
5365 "move %0, $2\n" \
5366 : /*out*/ "=r" (_res) \
5367 : /*in*/ "0" (&_argvec[0]) \
5368 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5369 ); \
5370 lval = (__typeof__(lval)) _res; \
5371 } while (0)
5372
5373#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5374 arg7,arg8) \
5375 do { \
5376 volatile OrigFn _orig = (orig); \
5377 volatile unsigned long _argvec[9]; \
5378 volatile unsigned long _res; \
5379 _argvec[0] = (unsigned long)_orig.nraddr; \
5380 _argvec[1] = (unsigned long)(arg1); \
5381 _argvec[2] = (unsigned long)(arg2); \
5382 _argvec[3] = (unsigned long)(arg3); \
5383 _argvec[4] = (unsigned long)(arg4); \
5384 _argvec[5] = (unsigned long)(arg5); \
5385 _argvec[6] = (unsigned long)(arg6); \
5386 _argvec[7] = (unsigned long)(arg7); \
5387 _argvec[8] = (unsigned long)(arg8); \
5388 __asm__ volatile( \
5389 "subu $29, $29, 8 \n\t" \
5390 "sw $28, 0($29) \n\t" \
5391 "sw $31, 4($29) \n\t" \
5392 "lw $4, 20(%1) \n\t" \
5393 "subu $29, $29, 40\n\t" \
5394 "sw $4, 16($29) \n\t" \
5395 "lw $4, 24(%1) \n\t" \
5396 "sw $4, 20($29) \n\t" \
5397 "lw $4, 28(%1) \n\t" \
5398 "sw $4, 24($29) \n\t" \
5399 "lw $4, 32(%1) \n\t" \
5400 "sw $4, 28($29) \n\t" \
5401 "lw $4, 4(%1) \n\t" \
5402 "lw $5, 8(%1) \n\t" \
5403 "lw $6, 12(%1) \n\t" \
5404 "lw $7, 16(%1) \n\t" \
5405 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5406 VALGRIND_CALL_NOREDIR_T9 \
5407 "addu $29, $29, 40 \n\t" \
5408 "lw $28, 0($29) \n\t" \
5409 "lw $31, 4($29) \n\t" \
5410 "addu $29, $29, 8 \n\t" \
5411 "move %0, $2\n" \
5412 : /*out*/ "=r" (_res) \
5413 : /*in*/ "0" (&_argvec[0]) \
5414 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5415 ); \
5416 lval = (__typeof__(lval)) _res; \
5417 } while (0)
5418
5419#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5420 arg7,arg8,arg9) \
5421 do { \
5422 volatile OrigFn _orig = (orig); \
5423 volatile unsigned long _argvec[10]; \
5424 volatile unsigned long _res; \
5425 _argvec[0] = (unsigned long)_orig.nraddr; \
5426 _argvec[1] = (unsigned long)(arg1); \
5427 _argvec[2] = (unsigned long)(arg2); \
5428 _argvec[3] = (unsigned long)(arg3); \
5429 _argvec[4] = (unsigned long)(arg4); \
5430 _argvec[5] = (unsigned long)(arg5); \
5431 _argvec[6] = (unsigned long)(arg6); \
5432 _argvec[7] = (unsigned long)(arg7); \
5433 _argvec[8] = (unsigned long)(arg8); \
5434 _argvec[9] = (unsigned long)(arg9); \
5435 __asm__ volatile( \
5436 "subu $29, $29, 8 \n\t" \
5437 "sw $28, 0($29) \n\t" \
5438 "sw $31, 4($29) \n\t" \
5439 "lw $4, 20(%1) \n\t" \
5440 "subu $29, $29, 40\n\t" \
5441 "sw $4, 16($29) \n\t" \
5442 "lw $4, 24(%1) \n\t" \
5443 "sw $4, 20($29) \n\t" \
5444 "lw $4, 28(%1) \n\t" \
5445 "sw $4, 24($29) \n\t" \
5446 "lw $4, 32(%1) \n\t" \
5447 "sw $4, 28($29) \n\t" \
5448 "lw $4, 36(%1) \n\t" \
5449 "sw $4, 32($29) \n\t" \
5450 "lw $4, 4(%1) \n\t" \
5451 "lw $5, 8(%1) \n\t" \
5452 "lw $6, 12(%1) \n\t" \
5453 "lw $7, 16(%1) \n\t" \
5454 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5455 VALGRIND_CALL_NOREDIR_T9 \
5456 "addu $29, $29, 40 \n\t" \
5457 "lw $28, 0($29) \n\t" \
5458 "lw $31, 4($29) \n\t" \
5459 "addu $29, $29, 8 \n\t" \
5460 "move %0, $2\n" \
5461 : /*out*/ "=r" (_res) \
5462 : /*in*/ "0" (&_argvec[0]) \
5463 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5464 ); \
5465 lval = (__typeof__(lval)) _res; \
5466 } while (0)
5467
5468#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5469 arg7,arg8,arg9,arg10) \
5470 do { \
5471 volatile OrigFn _orig = (orig); \
5472 volatile unsigned long _argvec[11]; \
5473 volatile unsigned long _res; \
5474 _argvec[0] = (unsigned long)_orig.nraddr; \
5475 _argvec[1] = (unsigned long)(arg1); \
5476 _argvec[2] = (unsigned long)(arg2); \
5477 _argvec[3] = (unsigned long)(arg3); \
5478 _argvec[4] = (unsigned long)(arg4); \
5479 _argvec[5] = (unsigned long)(arg5); \
5480 _argvec[6] = (unsigned long)(arg6); \
5481 _argvec[7] = (unsigned long)(arg7); \
5482 _argvec[8] = (unsigned long)(arg8); \
5483 _argvec[9] = (unsigned long)(arg9); \
5484 _argvec[10] = (unsigned long)(arg10); \
5485 __asm__ volatile( \
5486 "subu $29, $29, 8 \n\t" \
5487 "sw $28, 0($29) \n\t" \
5488 "sw $31, 4($29) \n\t" \
5489 "lw $4, 20(%1) \n\t" \
5490 "subu $29, $29, 48\n\t" \
5491 "sw $4, 16($29) \n\t" \
5492 "lw $4, 24(%1) \n\t" \
5493 "sw $4, 20($29) \n\t" \
5494 "lw $4, 28(%1) \n\t" \
5495 "sw $4, 24($29) \n\t" \
5496 "lw $4, 32(%1) \n\t" \
5497 "sw $4, 28($29) \n\t" \
5498 "lw $4, 36(%1) \n\t" \
5499 "sw $4, 32($29) \n\t" \
5500 "lw $4, 40(%1) \n\t" \
5501 "sw $4, 36($29) \n\t" \
5502 "lw $4, 4(%1) \n\t" \
5503 "lw $5, 8(%1) \n\t" \
5504 "lw $6, 12(%1) \n\t" \
5505 "lw $7, 16(%1) \n\t" \
5506 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5507 VALGRIND_CALL_NOREDIR_T9 \
5508 "addu $29, $29, 48 \n\t" \
5509 "lw $28, 0($29) \n\t" \
5510 "lw $31, 4($29) \n\t" \
5511 "addu $29, $29, 8 \n\t" \
5512 "move %0, $2\n" \
5513 : /*out*/ "=r" (_res) \
5514 : /*in*/ "0" (&_argvec[0]) \
5515 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5516 ); \
5517 lval = (__typeof__(lval)) _res; \
5518 } while (0)
5519
5520#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5521 arg6,arg7,arg8,arg9,arg10, \
5522 arg11) \
5523 do { \
5524 volatile OrigFn _orig = (orig); \
5525 volatile unsigned long _argvec[12]; \
5526 volatile unsigned long _res; \
5527 _argvec[0] = (unsigned long)_orig.nraddr; \
5528 _argvec[1] = (unsigned long)(arg1); \
5529 _argvec[2] = (unsigned long)(arg2); \
5530 _argvec[3] = (unsigned long)(arg3); \
5531 _argvec[4] = (unsigned long)(arg4); \
5532 _argvec[5] = (unsigned long)(arg5); \
5533 _argvec[6] = (unsigned long)(arg6); \
5534 _argvec[7] = (unsigned long)(arg7); \
5535 _argvec[8] = (unsigned long)(arg8); \
5536 _argvec[9] = (unsigned long)(arg9); \
5537 _argvec[10] = (unsigned long)(arg10); \
5538 _argvec[11] = (unsigned long)(arg11); \
5539 __asm__ volatile( \
5540 "subu $29, $29, 8 \n\t" \
5541 "sw $28, 0($29) \n\t" \
5542 "sw $31, 4($29) \n\t" \
5543 "lw $4, 20(%1) \n\t" \
5544 "subu $29, $29, 48\n\t" \
5545 "sw $4, 16($29) \n\t" \
5546 "lw $4, 24(%1) \n\t" \
5547 "sw $4, 20($29) \n\t" \
5548 "lw $4, 28(%1) \n\t" \
5549 "sw $4, 24($29) \n\t" \
5550 "lw $4, 32(%1) \n\t" \
5551 "sw $4, 28($29) \n\t" \
5552 "lw $4, 36(%1) \n\t" \
5553 "sw $4, 32($29) \n\t" \
5554 "lw $4, 40(%1) \n\t" \
5555 "sw $4, 36($29) \n\t" \
5556 "lw $4, 44(%1) \n\t" \
5557 "sw $4, 40($29) \n\t" \
5558 "lw $4, 4(%1) \n\t" \
5559 "lw $5, 8(%1) \n\t" \
5560 "lw $6, 12(%1) \n\t" \
5561 "lw $7, 16(%1) \n\t" \
5562 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5563 VALGRIND_CALL_NOREDIR_T9 \
5564 "addu $29, $29, 48 \n\t" \
5565 "lw $28, 0($29) \n\t" \
5566 "lw $31, 4($29) \n\t" \
5567 "addu $29, $29, 8 \n\t" \
5568 "move %0, $2\n" \
5569 : /*out*/ "=r" (_res) \
5570 : /*in*/ "0" (&_argvec[0]) \
5571 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5572 ); \
5573 lval = (__typeof__(lval)) _res; \
5574 } while (0)
5575
5576#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5577 arg6,arg7,arg8,arg9,arg10, \
5578 arg11,arg12) \
5579 do { \
5580 volatile OrigFn _orig = (orig); \
5581 volatile unsigned long _argvec[13]; \
5582 volatile unsigned long _res; \
5583 _argvec[0] = (unsigned long)_orig.nraddr; \
5584 _argvec[1] = (unsigned long)(arg1); \
5585 _argvec[2] = (unsigned long)(arg2); \
5586 _argvec[3] = (unsigned long)(arg3); \
5587 _argvec[4] = (unsigned long)(arg4); \
5588 _argvec[5] = (unsigned long)(arg5); \
5589 _argvec[6] = (unsigned long)(arg6); \
5590 _argvec[7] = (unsigned long)(arg7); \
5591 _argvec[8] = (unsigned long)(arg8); \
5592 _argvec[9] = (unsigned long)(arg9); \
5593 _argvec[10] = (unsigned long)(arg10); \
5594 _argvec[11] = (unsigned long)(arg11); \
5595 _argvec[12] = (unsigned long)(arg12); \
5596 __asm__ volatile( \
5597 "subu $29, $29, 8 \n\t" \
5598 "sw $28, 0($29) \n\t" \
5599 "sw $31, 4($29) \n\t" \
5600 "lw $4, 20(%1) \n\t" \
5601 "subu $29, $29, 56\n\t" \
5602 "sw $4, 16($29) \n\t" \
5603 "lw $4, 24(%1) \n\t" \
5604 "sw $4, 20($29) \n\t" \
5605 "lw $4, 28(%1) \n\t" \
5606 "sw $4, 24($29) \n\t" \
5607 "lw $4, 32(%1) \n\t" \
5608 "sw $4, 28($29) \n\t" \
5609 "lw $4, 36(%1) \n\t" \
5610 "sw $4, 32($29) \n\t" \
5611 "lw $4, 40(%1) \n\t" \
5612 "sw $4, 36($29) \n\t" \
5613 "lw $4, 44(%1) \n\t" \
5614 "sw $4, 40($29) \n\t" \
5615 "lw $4, 48(%1) \n\t" \
5616 "sw $4, 44($29) \n\t" \
5617 "lw $4, 4(%1) \n\t" \
5618 "lw $5, 8(%1) \n\t" \
5619 "lw $6, 12(%1) \n\t" \
5620 "lw $7, 16(%1) \n\t" \
5621 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5622 VALGRIND_CALL_NOREDIR_T9 \
5623 "addu $29, $29, 56 \n\t" \
5624 "lw $28, 0($29) \n\t" \
5625 "lw $31, 4($29) \n\t" \
5626 "addu $29, $29, 8 \n\t" \
5627 "move %0, $2\n" \
5628 : /*out*/ "=r" (_res) \
5629 : /*in*/ "r" (&_argvec[0]) \
5630 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5631 ); \
5632 lval = (__typeof__(lval)) _res; \
5633 } while (0)
5634
5635#endif /* PLAT_mips32_linux */
5636
5637/* ------------------------- mips64-linux ------------------------- */
5638
5639#if defined(PLAT_mips64_linux)
5640
/* These regs are trashed by the hidden call. */
/* Listed in the asm clobber lists of every mips64 CALL_FN_ macro
   below: $2-$15 plus $24, $25 (t9) and $31 (ra). */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
5645
/* These CALL_FN_ macros assume that on mips64-linux,
   sizeof(long long) == 8. */

/* Narrow a value to 'long' and then widen it to the 64-bit register
   width used in _argvec[].  The parameter is parenthesised so that an
   expression argument (e.g. 'a + b') is cast as a whole rather than
   having the casts bind only to its first operand. */
#define MIPS64_LONG2REG_CAST(x)  ((long long)(long)(x))
5650
/* Call the zero-argument function at _orig.nraddr without redirection:
   target address goes in $25 (t9), the word result is read from $2. */
#define CALL_FN_W_v(lval, orig)                              \
   do {                                                      \
      volatile OrigFn        _orig = (orig);                 \
      volatile unsigned long long _argvec[1];                \
      volatile unsigned long long _res;                      \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);       \
      __asm__ volatile(                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */               \
         VALGRIND_CALL_NOREDIR_T9                            \
         "move %0, $2\n"                                     \
         : /*out*/ "=r" (_res)                               \
         : /*in*/ "0" (&_argvec[0])                          \
         : /*trash*/ "memory", __CALLER_SAVED_REGS           \
         );                                                  \
      lval = (__typeof__(lval)) (long)_res;                  \
   } while (0)
5667
/* As CALL_FN_W_v, but passes one register argument in $4 (a0). */
#define CALL_FN_W_W(lval, orig, arg1)                        \
   do {                                                      \
      volatile OrigFn        _orig = (orig);                 \
      volatile unsigned long long _argvec[2];                \
      volatile unsigned long long _res;                      \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);       \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);               \
      __asm__ volatile(                                      \
         "ld $4, 8(%1)\n\t"   /* arg1*/                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */               \
         VALGRIND_CALL_NOREDIR_T9                            \
         "move %0, $2\n"                                     \
         : /*out*/ "=r" (_res)                               \
         : /*in*/ "r" (&_argvec[0])                          \
         : /*trash*/ "memory", __CALLER_SAVED_REGS           \
         );                                                  \
      lval = (__typeof__(lval)) (long)_res;                  \
   } while (0)
5686
/* As CALL_FN_W_W, but with two register arguments in $4/$5.
   Consistency fix: _argvec[0] now goes through MIPS64_LONG2REG_CAST
   like every other mips64 CALL_FN_ macro (it was previously assigned
   the raw nraddr, unlike its siblings). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                  \
   do {                                                      \
      volatile OrigFn        _orig = (orig);                 \
      volatile unsigned long long _argvec[3];                \
      volatile unsigned long long _res;                      \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);       \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);               \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);               \
      __asm__ volatile(                                      \
         "ld $4, 8(%1)\n\t"                                  \
         "ld $5, 16(%1)\n\t"                                 \
         "ld $25, 0(%1)\n\t"  /* target->t9 */               \
         VALGRIND_CALL_NOREDIR_T9                            \
         "move %0, $2\n"                                     \
         : /*out*/ "=r" (_res)                               \
         : /*in*/ "r" (&_argvec[0])                          \
         : /*trash*/ "memory", __CALLER_SAVED_REGS           \
         );                                                  \
      lval = (__typeof__(lval)) (long)_res;                  \
   } while (0)
5707
5708
/* As CALL_FN_W_WW, but with a third register argument in $6.
   Consistency fix: _argvec[0] now goes through MIPS64_LONG2REG_CAST
   like every other mips64 CALL_FN_ macro (it was previously assigned
   the raw nraddr, unlike its siblings). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)            \
   do {                                                      \
      volatile OrigFn        _orig = (orig);                 \
      volatile unsigned long long _argvec[4];                \
      volatile unsigned long long _res;                      \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);       \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);               \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);               \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);               \
      __asm__ volatile(                                      \
         "ld $4, 8(%1)\n\t"                                  \
         "ld $5, 16(%1)\n\t"                                 \
         "ld $6, 24(%1)\n\t"                                 \
         "ld $25, 0(%1)\n\t"  /* target->t9 */               \
         VALGRIND_CALL_NOREDIR_T9                            \
         "move %0, $2\n"                                     \
         : /*out*/ "=r" (_res)                               \
         : /*in*/ "r" (&_argvec[0])                          \
         : /*trash*/ "memory", __CALLER_SAVED_REGS           \
         );                                                  \
      lval = (__typeof__(lval)) (long)_res;                  \
   } while (0)
5731
/* As CALL_FN_W_WWW, but with a fourth register argument in $7. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)      \
   do {                                                      \
      volatile OrigFn        _orig = (orig);                 \
      volatile unsigned long long _argvec[5];                \
      volatile unsigned long long _res;                      \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);       \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);               \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);               \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);               \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);               \
      __asm__ volatile(                                      \
         "ld $4, 8(%1)\n\t"                                  \
         "ld $5, 16(%1)\n\t"                                 \
         "ld $6, 24(%1)\n\t"                                 \
         "ld $7, 32(%1)\n\t"                                 \
         "ld $25, 0(%1)\n\t"  /* target->t9 */               \
         VALGRIND_CALL_NOREDIR_T9                            \
         "move %0, $2\n"                                     \
         : /*out*/ "=r" (_res)                               \
         : /*in*/ "r" (&_argvec[0])                          \
         : /*trash*/ "memory", __CALLER_SAVED_REGS           \
         );                                                  \
      lval = (__typeof__(lval)) (long)_res;                  \
   } while (0)
5756
/* As CALL_FN_W_WWWW, but with a fifth register argument in $8. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)   \
   do {                                                      \
      volatile OrigFn        _orig = (orig);                 \
      volatile unsigned long long _argvec[6];                \
      volatile unsigned long long _res;                      \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);       \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);               \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);               \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);               \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);               \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);               \
      __asm__ volatile(                                      \
         "ld $4, 8(%1)\n\t"                                  \
         "ld $5, 16(%1)\n\t"                                 \
         "ld $6, 24(%1)\n\t"                                 \
         "ld $7, 32(%1)\n\t"                                 \
         "ld $8, 40(%1)\n\t"                                 \
         "ld $25, 0(%1)\n\t"  /* target->t9 */               \
         VALGRIND_CALL_NOREDIR_T9                            \
         "move %0, $2\n"                                     \
         : /*out*/ "=r" (_res)                               \
         : /*in*/ "r" (&_argvec[0])                          \
         : /*trash*/ "memory", __CALLER_SAVED_REGS           \
         );                                                  \
      lval = (__typeof__(lval)) (long)_res;                  \
   } while (0)
5783
/* As CALL_FN_W_5W, but with a sixth register argument in $9. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do {                                                      \
      volatile OrigFn        _orig = (orig);                 \
      volatile unsigned long long _argvec[7];                \
      volatile unsigned long long _res;                      \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);       \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);               \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);               \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);               \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);               \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);               \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);               \
      __asm__ volatile(                                      \
         "ld $4, 8(%1)\n\t"                                  \
         "ld $5, 16(%1)\n\t"                                 \
         "ld $6, 24(%1)\n\t"                                 \
         "ld $7, 32(%1)\n\t"                                 \
         "ld $8, 40(%1)\n\t"                                 \
         "ld $9, 48(%1)\n\t"                                 \
         "ld $25, 0(%1)\n\t"  /* target->t9 */               \
         VALGRIND_CALL_NOREDIR_T9                            \
         "move %0, $2\n"                                     \
         : /*out*/ "=r" (_res)                               \
         : /*in*/ "r" (&_argvec[0])                          \
         : /*trash*/ "memory", __CALLER_SAVED_REGS           \
         );                                                  \
      lval = (__typeof__(lval)) (long)_res;                  \
   } while (0)
5812
/* As CALL_FN_W_6W, but with a seventh register argument in $10. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7)                                   \
   do {                                                      \
      volatile OrigFn        _orig = (orig);                 \
      volatile unsigned long long _argvec[8];                \
      volatile unsigned long long _res;                      \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);       \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);               \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);               \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);               \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);               \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);               \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);               \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);               \
      __asm__ volatile(                                      \
         "ld $4, 8(%1)\n\t"                                  \
         "ld $5, 16(%1)\n\t"                                 \
         "ld $6, 24(%1)\n\t"                                 \
         "ld $7, 32(%1)\n\t"                                 \
         "ld $8, 40(%1)\n\t"                                 \
         "ld $9, 48(%1)\n\t"                                 \
         "ld $10, 56(%1)\n\t"                                \
         "ld $25, 0(%1) \n\t"  /* target->t9 */              \
         VALGRIND_CALL_NOREDIR_T9                            \
         "move %0, $2\n"                                     \
         : /*out*/ "=r" (_res)                               \
         : /*in*/ "r" (&_argvec[0])                          \
         : /*trash*/ "memory", __CALLER_SAVED_REGS           \
         );                                                  \
      lval = (__typeof__(lval)) (long)_res;                  \
   } while (0)
5844
/* As CALL_FN_W_7W, but with an eighth register argument in $11
   (the last argument register used by these macros). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8)                              \
   do {                                                      \
      volatile OrigFn        _orig = (orig);                 \
      volatile unsigned long long _argvec[9];                \
      volatile unsigned long long _res;                      \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);       \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);               \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);               \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);               \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);               \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);               \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);               \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);               \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);               \
      __asm__ volatile(                                      \
         "ld $4, 8(%1)\n\t"                                  \
         "ld $5, 16(%1)\n\t"                                 \
         "ld $6, 24(%1)\n\t"                                 \
         "ld $7, 32(%1)\n\t"                                 \
         "ld $8, 40(%1)\n\t"                                 \
         "ld $9, 48(%1)\n\t"                                 \
         "ld $10, 56(%1)\n\t"                                \
         "ld $11, 64(%1)\n\t"                                \
         "ld $25, 0(%1) \n\t"  /* target->t9 */              \
         VALGRIND_CALL_NOREDIR_T9                            \
         "move %0, $2\n"                                     \
         : /*out*/ "=r" (_res)                               \
         : /*in*/ "r" (&_argvec[0])                          \
         : /*trash*/ "memory", __CALLER_SAVED_REGS           \
         );                                                  \
      lval = (__typeof__(lval)) (long)_res;                  \
   } while (0)
5878
/* As CALL_FN_W_8W, but the ninth argument no longer fits in a
   register: 8 bytes are reserved on the stack and arg9 is stored at
   0($29) before the call; $29 is restored afterwards. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9)                         \
   do {                                                      \
      volatile OrigFn        _orig = (orig);                 \
      volatile unsigned long long _argvec[10];               \
      volatile unsigned long long _res;                      \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);       \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);               \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);               \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);               \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);               \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);               \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);               \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);               \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);               \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);               \
      __asm__ volatile(                                      \
         "dsubu $29, $29, 8\n\t"                             \
         "ld $4, 72(%1)\n\t"                                 \
         "sd $4, 0($29)\n\t"                                 \
         "ld $4, 8(%1)\n\t"                                  \
         "ld $5, 16(%1)\n\t"                                 \
         "ld $6, 24(%1)\n\t"                                 \
         "ld $7, 32(%1)\n\t"                                 \
         "ld $8, 40(%1)\n\t"                                 \
         "ld $9, 48(%1)\n\t"                                 \
         "ld $10, 56(%1)\n\t"                                \
         "ld $11, 64(%1)\n\t"                                \
         "ld $25, 0(%1)\n\t"  /* target->t9 */               \
         VALGRIND_CALL_NOREDIR_T9                            \
         "daddu $29, $29, 8\n\t"                             \
         "move %0, $2\n"                                     \
         : /*out*/ "=r" (_res)                               \
         : /*in*/ "r" (&_argvec[0])                          \
         : /*trash*/ "memory", __CALLER_SAVED_REGS           \
         );                                                  \
      lval = (__typeof__(lval)) (long)_res;                  \
   } while (0)
5917
/* As CALL_FN_W_9W, but with two stack arguments: arg9/arg10 are
   stored at 0($29)/8($29) in a 16-byte stack area. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10)                  \
   do {                                                      \
      volatile OrigFn        _orig = (orig);                 \
      volatile unsigned long long _argvec[11];               \
      volatile unsigned long long _res;                      \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);       \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);               \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);               \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);               \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);               \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);               \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);               \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);               \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);               \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);               \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);             \
      __asm__ volatile(                                      \
         "dsubu $29, $29, 16\n\t"                            \
         "ld $4, 72(%1)\n\t"                                 \
         "sd $4, 0($29)\n\t"                                 \
         "ld $4, 80(%1)\n\t"                                 \
         "sd $4, 8($29)\n\t"                                 \
         "ld $4, 8(%1)\n\t"                                  \
         "ld $5, 16(%1)\n\t"                                 \
         "ld $6, 24(%1)\n\t"                                 \
         "ld $7, 32(%1)\n\t"                                 \
         "ld $8, 40(%1)\n\t"                                 \
         "ld $9, 48(%1)\n\t"                                 \
         "ld $10, 56(%1)\n\t"                                \
         "ld $11, 64(%1)\n\t"                                \
         "ld $25, 0(%1)\n\t"  /* target->t9 */               \
         VALGRIND_CALL_NOREDIR_T9                            \
         "daddu $29, $29, 16\n\t"                            \
         "move %0, $2\n"                                     \
         : /*out*/ "=r" (_res)                               \
         : /*in*/ "r" (&_argvec[0])                          \
         : /*trash*/ "memory", __CALLER_SAVED_REGS           \
         );                                                  \
      lval = (__typeof__(lval)) (long)_res;                  \
   } while (0)
5959
/* As CALL_FN_W_10W, but with three stack arguments (arg9..arg11 at
   0/8/16($29) in a 24-byte stack area). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,  \
                      arg6,arg7,arg8,arg9,arg10,             \
                      arg11)                                 \
   do {                                                      \
      volatile OrigFn        _orig = (orig);                 \
      volatile unsigned long long _argvec[12];               \
      volatile unsigned long long _res;                      \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);       \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);               \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);               \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);               \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);               \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);               \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);               \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);               \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);               \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);               \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);             \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);             \
      __asm__ volatile(                                      \
         "dsubu $29, $29, 24\n\t"                            \
         "ld $4, 72(%1)\n\t"                                 \
         "sd $4, 0($29)\n\t"                                 \
         "ld $4, 80(%1)\n\t"                                 \
         "sd $4, 8($29)\n\t"                                 \
         "ld $4, 88(%1)\n\t"                                 \
         "sd $4, 16($29)\n\t"                                \
         "ld $4, 8(%1)\n\t"                                  \
         "ld $5, 16(%1)\n\t"                                 \
         "ld $6, 24(%1)\n\t"                                 \
         "ld $7, 32(%1)\n\t"                                 \
         "ld $8, 40(%1)\n\t"                                 \
         "ld $9, 48(%1)\n\t"                                 \
         "ld $10, 56(%1)\n\t"                                \
         "ld $11, 64(%1)\n\t"                                \
         "ld $25, 0(%1)\n\t"  /* target->t9 */               \
         VALGRIND_CALL_NOREDIR_T9                            \
         "daddu $29, $29, 24\n\t"                            \
         "move %0, $2\n"                                     \
         : /*out*/ "=r" (_res)                               \
         : /*in*/ "r" (&_argvec[0])                          \
         : /*trash*/ "memory", __CALLER_SAVED_REGS           \
         );                                                  \
      lval = (__typeof__(lval)) (long)_res;                  \
   } while (0)
6005
/* As CALL_FN_W_11W, but with four stack arguments (arg9..arg12 at
   0/8/16/24($29) in a 32-byte stack area). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,  \
                      arg6,arg7,arg8,arg9,arg10,             \
                      arg11,arg12)                           \
   do {                                                      \
      volatile OrigFn        _orig = (orig);                 \
      volatile unsigned long long _argvec[13];               \
      volatile unsigned long long _res;                      \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);       \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);               \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);               \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);               \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);               \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);               \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);               \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);               \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);               \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);               \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);             \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);             \
      _argvec[12] = MIPS64_LONG2REG_CAST(arg12);             \
      __asm__ volatile(                                      \
         "dsubu $29, $29, 32\n\t"                            \
         "ld $4, 72(%1)\n\t"                                 \
         "sd $4, 0($29)\n\t"                                 \
         "ld $4, 80(%1)\n\t"                                 \
         "sd $4, 8($29)\n\t"                                 \
         "ld $4, 88(%1)\n\t"                                 \
         "sd $4, 16($29)\n\t"                                \
         "ld $4, 96(%1)\n\t"                                 \
         "sd $4, 24($29)\n\t"                                \
         "ld $4, 8(%1)\n\t"                                  \
         "ld $5, 16(%1)\n\t"                                 \
         "ld $6, 24(%1)\n\t"                                 \
         "ld $7, 32(%1)\n\t"                                 \
         "ld $8, 40(%1)\n\t"                                 \
         "ld $9, 48(%1)\n\t"                                 \
         "ld $10, 56(%1)\n\t"                                \
         "ld $11, 64(%1)\n\t"                                \
         "ld $25, 0(%1)\n\t"  /* target->t9 */               \
         VALGRIND_CALL_NOREDIR_T9                            \
         "daddu $29, $29, 32\n\t"                            \
         "move %0, $2\n"                                     \
         : /*out*/ "=r" (_res)                               \
         : /*in*/ "r" (&_argvec[0])                          \
         : /*trash*/ "memory", __CALLER_SAVED_REGS           \
         );                                                  \
      lval = (__typeof__(lval)) (long)_res;                  \
   } while (0)
6054
6055#endif /* PLAT_mips64_linux */
6056
6057/* ------------------------------------------------------------------ */
6058/* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6059/* */
6060/* ------------------------------------------------------------------ */
6061
6062/* Some request codes. There are many more of these, but most are not
6063 exposed to end-user view. These are the public ones, all of the
6064 form 0x1000 + small_number.
6065
6066 Core ones are in the range 0x00000000--0x0000ffff. The non-public
6067 ones start at 0x2000.
6068*/
6069
/* These macros are used by tools -- they must be public, but don't
   embed them into other programs. */
/* Build a tool-specific request-code base from two characters (packed
   into bits 31..16); VG_IS_TOOL_USERREQ tests whether request code 'v'
   belongs to the tool identified by ('a','b'). */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
6076
6077/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
6078 This enum comprises an ABI exported by Valgrind to programs
6079 which use client requests. DO NOT CHANGE THE NUMERIC VALUES OF THESE
6080 ENTRIES, NOR DELETE ANY -- add new ones at the end of the most
6081 relevant group. */
typedef
   enum { /* Values are ABI-frozen (see the warning above): never
             renumber or remove entries; append within a group. */
          VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
          /* out of numeric order because it was added later; the
             value is nevertheless ABI-fixed. */
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK   = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL   = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL  = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC    = 0x1305,
          VG_USERREQ__MEMPOOL_FREE     = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM     = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL     = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE   = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS   = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Some requests used for Valgrind internal, such as
             self-test or self-hosting. */
          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
          /* Used by Inner Valgrind to inform Outer Valgrind where to
             find the list of inner guest threads */
          VG_USERREQ__INNER_THREADS    = 0x1902,
   } Vg_ClientRequest;
6159
/* Non-GNU compilers have no __extension__ keyword; define it away so
   that later uses (presumably in the request macros) still parse. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
6163
6164
6165/* Returns the number of Valgrinds this code is running under. That
6166 is, 0 if running natively, 1 if running under Valgrind, 2 if
6167 running under Valgrind which is running under another Valgrind,
6168 etc. */
/* Expands to 0 when running natively (the request's default value).
   NOTE(review): the trailing '\' on the final line continues the macro
   onto the following blank line; harmless as the file stands, but
   fragile -- confirm before inserting anything directly below. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)                    \
6173
6174
6175/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
6176 _qzz_len - 1]. Useful if you are debugging a JITter or some such,
6177 since it provides a way to make sure valgrind will retranslate the
6178 invalidated area. Returns no value. */
/* Statement form; returns no value (see the comment above). */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
                                    _qzz_addr, _qzz_len, 0, 0, 0)
6182
/* Tell an outer Valgrind where this (inner) Valgrind keeps its guest
   thread list (see VG_USERREQ__INNER_THREADS above). */
#define VALGRIND_INNER_THREADS(_qzz_addr)                             \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS,         \
                                   _qzz_addr, 0, 0, 0, 0)
6186
6187
6188/* These requests are for getting Valgrind itself to print something.
6189 Possibly with a backtrace. This is a really ugly hack. The return value
6190 is the number of characters printed, excluding the "**<pid>** " part at the
6191 start and the backtrace (if present). */
6192
6193#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6194/* Modern GCC will optimize the static routine out if unused,
6195 and unused attribute will shut down warnings about it. */
6196static int VALGRIND_PRINTF(const char *format, ...)
6197 __attribute__((format(__printf__, 1, 2), __unused__));
6198#endif
6199static int
6200#if defined(_MSC_VER)
6202#endif
6203VALGRIND_PRINTF(const char *format, ...)
6204{
6205#if defined(NVALGRIND)
6206 (void)format;
6207 return 0;
6208#else /* NVALGRIND */
6209#if defined(_MSC_VER) || defined(__MINGW64__)
6211#else
6212 unsigned long _qzz_res;
6213#endif
6214 va_list vargs;
6215 va_start(vargs, format);
6216#if defined(_MSC_VER) || defined(__MINGW64__)
6217 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6218 VG_USERREQ__PRINTF_VALIST_BY_REF,
6219 (uintptr_t)format,
6220 (uintptr_t)&vargs,
6221 0, 0, 0);
6222#else
6223 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6224 VG_USERREQ__PRINTF_VALIST_BY_REF,
6225 (unsigned long)format,
6226 (unsigned long)&vargs,
6227 0, 0, 0);
6228#endif
6229 va_end(vargs);
6230 return (int)_qzz_res;
6231#endif /* NVALGRIND */
6232}
6233
6234#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6235static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6236 __attribute__((format(__printf__, 1, 2), __unused__));
6237#endif
6238static int
6239#if defined(_MSC_VER)
6241#endif
6242VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6243{
6244#if defined(NVALGRIND)
6245 (void)format;
6246 return 0;
6247#else /* NVALGRIND */
6248#if defined(_MSC_VER) || defined(__MINGW64__)
6250#else
6251 unsigned long _qzz_res;
6252#endif
6253 va_list vargs;
6254 va_start(vargs, format);
6255#if defined(_MSC_VER) || defined(__MINGW64__)
6256 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6257 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6258 (uintptr_t)format,
6259 (uintptr_t)&vargs,
6260 0, 0, 0);
6261#else
6262 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6263 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6264 (unsigned long)format,
6265 (unsigned long)&vargs,
6266 0, 0, 0);
6267#endif
6268 va_end(vargs);
6269 return (int)_qzz_res;
6270#endif /* NVALGRIND */
6271}
6272
6273
6274/* These requests allow control to move from the simulated CPU to the
6275 real CPU, calling an arbitrary function.
6276
6277 Note that the current ThreadId is inserted as the first argument.
6278 So this call:
6279
6280 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
6281
6282 requires f to have this signature:
6283
6284 Word f(Word tid, Word arg1, Word arg2)
6285
6286 where "Word" is a word-sized type.
6287
6288 Note that these client requests are not entirely reliable. For example,
6289 if you call a function with them that subsequently calls printf(),
6290 there's a high chance Valgrind will crash. Generally, your prospects of
6291 these working are made higher if the called function does not refer to
6292 any global variables, and does not refer to any libc or other functions
6293 (printf et al). Any kind of entanglement with libc or dynamic linking is
6294 likely to have a bad outcome, for tricky reasons which we've grappled
6295 with a lot in the past.
6296*/
/* Run _qyy_fn(tid) on the real CPU; returns 0 if not on Valgrind. */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

/* Run _qyy_fn(tid, _qyy_arg1) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

/* Run _qyy_fn(tid, _qyy_arg1, _qyy_arg2) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

/* Run _qyy_fn(tid, _qyy_arg1, _qyy_arg2, _qyy_arg3) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL3,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2,         \
                                    _qyy_arg3, 0)
6321
6322
6323/* Counts the number of errors that have been recorded by a tool. Nb:
6324 the tool must record the errors with VG_(maybe_record_error)() or
6325 VG_(unique_error)() for them to be counted. */
/* Evaluates to 0 when not running under Valgrind. */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
6331
6332/* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
6333 when heap blocks are allocated in order to give accurate results. This
6334 happens automatically for the standard allocator functions such as
6335 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
6336 delete[], etc.
6337
6338 But if your program uses a custom allocator, this doesn't automatically
6339 happen, and Valgrind will not do as well. For example, if you allocate
   superblocks with mmap() and then allocate chunks out of the superblocks, all
6341 Valgrind's observations will be at the mmap() level and it won't know that
6342 the chunks should be considered separate entities. In Memcheck's case,
6343 that means you probably won't get heap block overrun detection (because
6344 there won't be redzones marked as unaddressable) and you definitely won't
6345 get any leak detection.
6346
6347 The following client requests allow a custom allocator to be annotated so
6348 that it can be handled accurately by Valgrind.
6349
6350 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
6351 by a malloc()-like function. For Memcheck (an illustrative case), this
6352 does two things:
6353
6354 - It records that the block has been allocated. This means any addresses
6355 within the block mentioned in error messages will be
6356 identified as belonging to the block. It also means that if the block
6357 isn't freed it will be detected by the leak checker.
6358
6359 - It marks the block as being addressable and undefined (if 'is_zeroed' is
6360 not set), or addressable and defined (if 'is_zeroed' is set). This
6361 controls how accesses to the block by the program are handled.
6362
6363 'addr' is the start of the usable block (ie. after any
6364 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
6365 can apply redzones -- these are blocks of padding at the start and end of
6366 each block. Adding redzones is recommended as it makes it much more likely
6367 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
6368 zeroed (or filled with another predictable value), as is the case for
6369 calloc().
6370
6371 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
6372 heap block -- that will be used by the client program -- is allocated.
6373 It's best to put it at the outermost level of the allocator if possible;
6374 for example, if you have a function my_alloc() which calls
6375 internal_alloc(), and the client request is put inside internal_alloc(),
6376 stack traces relating to the heap block will contain entries for both
6377 my_alloc() and internal_alloc(), which is probably not what you want.
6378
6379 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
6380 custom blocks from within a heap block, B, that has been allocated with
6381 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
6382 -- the custom blocks will take precedence.
6383
6384 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
6385 Memcheck, it does two things:
6386
6387 - It records that the block has been deallocated. This assumes that the
6388 block was annotated as having been allocated via
6389 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6390
6391 - It marks the block as being unaddressable.
6392
6393 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
6394 heap block is deallocated.
6395
6396 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
6397 Memcheck, it does four things:
6398
6399 - It records that the size of a block has been changed. This assumes that
6400 the block was annotated as having been allocated via
6401 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6402
6403 - If the block shrunk, it marks the freed memory as being unaddressable.
6404
6405 - If the block grew, it marks the new area as undefined and defines a red
6406 zone past the end of the new block.
6407
6408 - The V-bits of the overlap between the old and the new block are preserved.
6409
6410 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
6411 and before deallocation of the old block.
6412
6413 In many cases, these three client requests will not be enough to get your
6414 allocator working well with Memcheck. More specifically, if your allocator
6415 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
6416 will be necessary to mark the memory as addressable just before the zeroing
6417 occurs, otherwise you'll get a lot of invalid write errors. For example,
6418 you'll need to do this if your allocator recycles freed blocks, but it
6419 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
6420 Alternatively, if your allocator reuses freed blocks for allocator-internal
6421 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
6422
6423 Really, what's happening is a blurring of the lines between the client
6424 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
6425 memory should be considered unaddressable to the client program, but the
6426 allocator knows more than the rest of the client program and so may be able
6427 to safely access it. Extra client requests are necessary for Valgrind to
6428 understand the distinction between the allocator and the rest of the
6429 program.
6430
6431 Ignored if addr == 0.
6432*/
/* See the long explanation above for addr/sizeB/rzB/is_zeroed. */
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)          \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK,       \
                                    addr, sizeB, rzB, is_zeroed, 0)
6436
/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,    \
                                    addr, oldSizeB, newSizeB, rzB, 0)
6443
6444/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
6445 Ignored if addr == 0.
6446*/
6447#define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
6448 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, \
6449 addr, rzB, 0, 0, 0)
6450
/* Create a memory pool. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,   \
                                    pool, rzB, is_zeroed, 0, 0)

/* Create a memory pool with some flags specifying extended behaviour.
   When flags is zero, the behaviour is identical to VALGRIND_CREATE_MEMPOOL.

   The flag VALGRIND_MEMPOOL_METAPOOL specifies that the pieces of memory
   associated with the pool using VALGRIND_MEMPOOL_ALLOC will be used
   by the application as superblocks to dole out MALLOC_LIKE blocks using
   VALGRIND_MALLOCLIKE_BLOCK. In other words, a meta pool is a "2 levels"
   pool : first level is the blocks described by VALGRIND_MEMPOOL_ALLOC.
   The second level blocks are described using VALGRIND_MALLOCLIKE_BLOCK.
   Note that the association between the pool and the second level blocks
   is implicit : second level blocks will be located inside first level
   blocks. It is necessary to use the VALGRIND_MEMPOOL_METAPOOL flag
   for such 2 levels pools, as otherwise valgrind will detect overlapping
   memory blocks, and will abort execution (e.g. during leak search).

   Such a meta pool can also be marked as an 'auto free' pool using the flag
   VALGRIND_MEMPOOL_AUTO_FREE, which must be OR-ed together with the
   VALGRIND_MEMPOOL_METAPOOL. For an 'auto free' pool, VALGRIND_MEMPOOL_FREE
   will automatically free the second level blocks that are contained
   inside the first level block freed with VALGRIND_MEMPOOL_FREE.
   In other words, calling VALGRIND_MEMPOOL_FREE will cause implicit calls
   to VALGRIND_FREELIKE_BLOCK for all the second level blocks included
   in the first level block.
   Note: it is an error to use the VALGRIND_MEMPOOL_AUTO_FREE flag
   without the VALGRIND_MEMPOOL_METAPOOL flag.
*/
#define VALGRIND_MEMPOOL_AUTO_FREE  1
#define VALGRIND_MEMPOOL_METAPOOL   2
#define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags)  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,   \
                                    pool, rzB, is_zeroed, flags, 0)
6487
/* Destroy a memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,  \
                                    pool, 0, 0, 0, 0)

/* Associate a piece of memory with a memory pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,    \
                                    pool, addr, size, 0, 0)

/* Disassociate a piece of memory from a memory pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,     \
                                    pool, addr, 0, 0, 0)

/* Disassociate any pieces outside a particular range. */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,     \
                                    pool, addr, size, 0, 0)
6507
/* Tell the tool that the pool previously anchored at address poolA has
   moved to be anchored at address poolB (e.g. because the pool's anchor
   structure was itself reallocated).  NOTE(review): the previous comment
   here ("Resize and/or move a piece associated with a memory pool") was a
   copy-paste of the VALGRIND_MEMPOOL_CHANGE description; see the Memcheck
   manual's "Memory Pools" section for the MOVE_MEMPOOL semantics. */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,     \
                                    poolA, poolB, 0, 0, 0)
6512
/* Resize and/or move a piece associated with a memory pool: the piece
   formerly at addrA is now at addrB with the given size. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,   \
                                    pool, addrA, addrB, size, 0)

/* Return 1 if a mempool exists, else 0. */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MEMPOOL_EXISTS,        \
                               pool, 0, 0, 0, 0)
6523
/* Mark a piece of memory as being a stack. Returns a stack id.
   start is the lowest addressable stack byte, end is the highest
   addressable stack byte. */
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__STACK_REGISTER,        \
                               start, end, 0, 0, 0)

/* Unmark the piece of memory associated with a stack id as being a
   stack. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
                                    id, 0, 0, 0, 0)

/* Change the start and end address of the stack id.
   start is the new lowest addressable stack byte, end is the new highest
   addressable stack byte. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,     \
                                    id, start, end, 0, 0)
6544
/* Load PDB debug info for Wine PE image_map. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                    fd, ptr, total_size, delta, 0)

/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,      \
                               addr, buf64, 0, 0, 0)
6558
/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled. */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)

/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING. */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)
6576
/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise. */
#define VALGRIND_MONITOR_COMMAND(command)                               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
                                    command, 0, 0, 0, 0)
6585
6586
6587#undef PLAT_x86_darwin
6588#undef PLAT_amd64_darwin
6589#undef PLAT_x86_win32
6590#undef PLAT_amd64_win64
6591#undef PLAT_x86_linux
6592#undef PLAT_amd64_linux
6593#undef PLAT_ppc32_linux
6594#undef PLAT_ppc64be_linux
6595#undef PLAT_ppc64le_linux
6596#undef PLAT_arm_linux
6597#undef PLAT_s390x_linux
6598#undef PLAT_mips32_linux
6599#undef PLAT_mips64_linux
6600#undef PLAT_x86_solaris
6601#undef PLAT_amd64_solaris
6602
6603#endif /* __VALGRIND_H */
/*
   This file is part of the KDE documentation.
   Documentation copyright © 1996-2024 The KDE developers.
   Generated on Tue Mar 26 2024 11:20:16 by doxygen 1.10.0 written by Dimitri van Heesch, © 1997-2006
   KDE's Doxygen guidelines are available online.
*/