#ifndef _ASM_X86_LINKAGE_H
#define _ASM_X86_LINKAGE_H

#include <linux/stringify.h>

#undef notrace
#define notrace __attribute__((no_instrument_function))

#ifdef CONFIG_X86_32
#define asmlinkage CPP_ASMLINKAGE __attribute__((regparm(0)))

/*
 * Make sure the compiler doesn't do anything stupid with the
 * arguments on the stack - they are owned by the *caller*, not
 * the callee. This just fools gcc into not spilling into them,
 * and keeps it from doing tailcall recursion and/or using the
 * stack slots for temporaries, since they are live and "used"
 * all the way to the end of the function.
 *
 * NOTE! On x86-64, all the arguments are in registers, so this
 * only matters on a 32-bit kernel.
 */
#define asmlinkage_protect(n, ret, args...) \
	__asmlinkage_protect##n(ret, ##args)
#define __asmlinkage_protect_n(ret, args...) \
	__asm__ __volatile__ ("" : "=r" (ret) : "0" (ret), ##args)
#define __asmlinkage_protect0(ret) \
	__asmlinkage_protect_n(ret)
#define __asmlinkage_protect1(ret, arg1) \
	__asmlinkage_protect_n(ret, "g" (arg1))
#define __asmlinkage_protect2(ret, arg1, arg2) \
	__asmlinkage_protect_n(ret, "g" (arg1), "g" (arg2))
#define __asmlinkage_protect3(ret, arg1, arg2, arg3) \
	__asmlinkage_protect_n(ret, "g" (arg1), "g" (arg2), "g" (arg3))
#define __asmlinkage_protect4(ret, arg1, arg2, arg3, arg4) \
	__asmlinkage_protect_n(ret, "g" (arg1), "g" (arg2), "g" (arg3), \
			       "g" (arg4))
#define __asmlinkage_protect5(ret, arg1, arg2, arg3, arg4, arg5) \
	__asmlinkage_protect_n(ret, "g" (arg1), "g" (arg2), "g" (arg3), \
			       "g" (arg4), "g" (arg5))
#define __asmlinkage_protect6(ret, arg1, arg2, arg3, arg4, arg5, arg6) \
	__asmlinkage_protect_n(ret, "g" (arg1), "g" (arg2), "g" (arg3), \
			       "g" (arg4), "g" (arg5), "g" (arg6))

#endif /* CONFIG_X86_32 */

#ifdef __ASSEMBLY__

#define GLOBAL(name)	\
	.globl name;	\
	name:
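
/*
 * Usage sketch (illustrative only; "my_entry" is a hypothetical
 * symbol): in a .S file,
 *
 *	GLOBAL(my_entry)
 *		ret
 *
 * expands to ".globl my_entry; my_entry:" followed by the body.
 */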

#if defined(CONFIG_X86_64) || defined(CONFIG_X86_ALIGNMENT_16)
/* Align to a 16-byte boundary, padding with 0x90 (NOP) bytes. */
#define __ALIGN		.p2align 4, 0x90
#define __ALIGN_STR	__stringify(__ALIGN)
#endif

#endif /* __ASSEMBLY__ */

#endif /* _ASM_X86_LINKAGE_H */