#ifndef __ASM_LINKAGE_H
#define __ASM_LINKAGE_H

#define __ALIGN .align 4
#define __ALIGN_STR ".align 4"

/*
 * Make sure the compiler doesn't do anything stupid with the
 * arguments on the stack - they are owned by the *caller*, not
 * the callee.  This just fools gcc into not spilling into them,
 * and keeps it from doing tailcall recursion and/or using the
 * stack slots for temporaries, since they are live and "used"
 * all the way to the end of the function.
 */
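/*
 * Mechanism: asmlinkage_protect(n, ret, ...) pastes n to pick the
 * matching variant below.  The empty asm ties ret to a single register
 * via the "=r" output and "0" matching-input constraints, and names
 * each argument's stack slot as an "m" input, which is what forces gcc
 * to treat those caller-owned slots as live.
 */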
#define asmlinkage_protect(n, ret, args...) \
	__asmlinkage_protect##n(ret, ##args)
#define __asmlinkage_protect_n(ret, args...) \
	__asm__ __volatile__ ("" : "=r" (ret) : "0" (ret), ##args)
#define __asmlinkage_protect0(ret) \
	__asmlinkage_protect_n(ret)
#define __asmlinkage_protect1(ret, arg1) \
	__asmlinkage_protect_n(ret, "m" (arg1))
#define __asmlinkage_protect2(ret, arg1, arg2) \
	__asmlinkage_protect_n(ret, "m" (arg1), "m" (arg2))
#define __asmlinkage_protect3(ret, arg1, arg2, arg3) \
	__asmlinkage_protect_n(ret, "m" (arg1), "m" (arg2), "m" (arg3))
#define __asmlinkage_protect4(ret, arg1, arg2, arg3, arg4) \
	__asmlinkage_protect_n(ret, "m" (arg1), "m" (arg2), "m" (arg3), \
			       "m" (arg4))
#define __asmlinkage_protect5(ret, arg1, arg2, arg3, arg4, arg5) \
	__asmlinkage_protect_n(ret, "m" (arg1), "m" (arg2), "m" (arg3), \
			       "m" (arg4), "m" (arg5))
#define __asmlinkage_protect6(ret, arg1, arg2, arg3, arg4, arg5, arg6) \
	__asmlinkage_protect_n(ret, "m" (arg1), "m" (arg2), "m" (arg3), \
			       "m" (arg4), "m" (arg5), "m" (arg6))

#endif
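
/*
 * Usage sketch (illustration only, not part of this header): the kernel
 * invokes asmlinkage_protect() from its syscall wrapper macros, but a
 * hand-written equivalent for a two-argument syscall would look like
 * the code below.  sys_example() and do_example() are hypothetical
 * names, not real kernel functions.
 *
 *	asmlinkage long sys_example(int fd, unsigned long len)
 *	{
 *		long ret = do_example(fd, len);
 *
 *		asmlinkage_protect(2, ret, fd, len);
 *		return ret;
 *	}
 *
 * The expansion is an empty asm with "m" (fd) and "m" (len) inputs, so
 * gcc must keep the caller's stack slots for fd and len intact until
 * after do_example() returns, and cannot turn the call into a tail
 * call.
 */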