#ifndef _I386_ALTERNATIVE_H
#define _I386_ALTERNATIVE_H

#ifdef __KERNEL__

#include <asm/types.h>

#include <linux/types.h>

struct alt_instr {
	u8 *instr;		/* original instruction */
	u8 *replacement;
	u8  cpuid;		/* cpuid bit set for replacement */
	u8  instrlen;		/* length of original instruction */
	u8  replacementlen;	/* length of new instruction, <= instrlen */
	u8  pad;
};

extern void apply_alternatives(struct alt_instr *start, struct alt_instr *end);

struct module;
#ifdef CONFIG_SMP
extern void alternatives_smp_module_add(struct module *mod, char *name,
					void *locks, void *locks_end,
					void *text, void *text_end);
extern void alternatives_smp_module_del(struct module *mod);
extern void alternatives_smp_switch(int smp);
#else
static inline void alternatives_smp_module_add(struct module *mod, char *name,
					       void *locks, void *locks_end,
					       void *text, void *text_end) {}
static inline void alternatives_smp_module_del(struct module *mod) {}
static inline void alternatives_smp_switch(int smp) {}
#endif

#endif

/*
 * Alternative instructions for different CPU types or capabilities.
 *
 * This allows the use of optimized instructions even on generic binary
 * kernels.
 *
 * The length of oldinstr must be greater than or equal to the length of
 * newinstr; oldinstr can be padded with nops as needed to make this hold.
 *
 * For non-barrier-like inlines please define new variants
 * without volatile and memory clobber.
 */
#define alternative(oldinstr, newinstr, feature)			\
	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
		      ".section .altinstructions,\"a\"\n"		\
		      " .align 4\n"					\
		      " .long 661b\n"		/* label */		\
		      " .long 663f\n"		/* new instruction */	\
		      " .byte %c0\n"		/* feature bit */	\
		      " .byte 662b-661b\n"	/* sourcelen */		\
		      " .byte 664f-663f\n"	/* replacementlen */	\
		      ".previous\n"					\
		      ".section .altinstr_replacement,\"ax\"\n"	\
		      "663:\n\t" newinstr "\n664:\n"	/* replacement */ \
		      ".previous" :: "i" (feature) : "memory")

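/*
 * Usage sketch (illustrative, not part of this header): the i386 memory
 * barrier code uses alternative() roughly like the line below, assuming
 * X86_FEATURE_XMM2 is the SSE2 feature bit.  CPUs with SSE2 get the single
 * "mfence" patched in at boot; older CPUs keep the locked add, which is at
 * least as long as the replacement.
 *
 *	#define mb() alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2)
 */
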
/*
 * Alternative inline assembly with input.
 *
 * Peculiarities:
 * No memory clobber here.
 * Argument numbers start with 1.
 * Best is to use constraints that are fixed size (like (%1) ... "r").
 * If you use variable-sized constraints like "m" or "g" in the
 * replacement, make sure to pad to the worst-case length.
 */
#define alternative_input(oldinstr, newinstr, feature, input...)	\
	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
		      ".section .altinstructions,\"a\"\n"		\
		      " .align 4\n"					\
		      " .long 661b\n"		/* label */		\
		      " .long 663f\n"		/* new instruction */	\
		      " .byte %c0\n"		/* feature bit */	\
		      " .byte 662b-661b\n"	/* sourcelen */		\
		      " .byte 664f-663f\n"	/* replacementlen */	\
		      ".previous\n"					\
		      ".section .altinstr_replacement,\"ax\"\n"	\
		      "663:\n\t" newinstr "\n664:\n"	/* replacement */ \
		      ".previous" :: "i" (feature), ##input)

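/*
 * Usage sketch (illustrative, not defined here): the i386 prefetch()
 * helper uses alternative_input() roughly as below.  ASM_NOP4 and
 * X86_FEATURE_XMM come from other headers; note that %1 is the first
 * input because %0 is taken by the feature constant.
 *
 *	static inline void prefetch(const void *x)
 *	{
 *		alternative_input(ASM_NOP4,
 *				  "prefetchnta (%1)",
 *				  X86_FEATURE_XMM,
 *				  "r" (x));
 *	}
 */
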
/*
 * Alternative inline assembly for SMP.
 *
 * alternative_smp() takes two versions (SMP first, UP second) and is
 * for more complex stuff such as spinlocks.
 *
 * The LOCK_PREFIX macro defined here replaces the LOCK and
 * LOCK_PREFIX macros used everywhere in the source tree.
 *
 * SMP alternatives use the same data structures as the other
 * alternatives and the X86_FEATURE_UP flag to indicate the case of a
 * UP system running an SMP kernel.  The existing apply_alternatives()
 * works fine for patching an SMP kernel for UP.
 *
 * The SMP alternative tables can be kept after boot and contain both
 * UP and SMP versions of the instructions to allow switching back to
 * SMP at runtime, when hotplugging in a new CPU, which is especially
 * useful in virtualized environments.
 *
 * The very common lock prefix is handled as a special case in a
 * separate table which is a pure address list without replacement
 * pointer and size information.  That keeps the table sizes small.
 */

#ifdef CONFIG_SMP
#define alternative_smp(smpinstr, upinstr, args...)			\
	asm volatile ("661:\n\t" smpinstr "\n662:\n"			\
		      ".section .smp_altinstructions,\"a\"\n"		\
		      " .align 4\n"					\
		      " .long 661b\n"		/* label */		\
		      " .long 663f\n"		/* new instruction */	\
		      " .byte 0x68\n"		/* X86_FEATURE_UP */	\
		      " .byte 662b-661b\n"	/* sourcelen */		\
		      " .byte 664f-663f\n"	/* replacementlen */	\
		      ".previous\n"					\
		      ".section .smp_altinstr_replacement,\"awx\"\n"	\
		      "663:\n\t" upinstr "\n"	/* replacement */	\
		      "664:\n\t.fill 662b-661b,1,0x42\n" /* space for original */ \
		      ".previous" : args)

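/*
 * Usage sketch (hypothetical operands, not taken from this header): the
 * SMP variant carries the lock prefix, the UP variant does not, and on a
 * UP system the UP variant is patched in at boot via X86_FEATURE_UP.
 *
 *	alternative_smp("lock; incl %0", "incl %0",
 *			"+m" (counter) : : "memory");
 */
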
#define LOCK_PREFIX \
		".section .smp_locks,\"a\"\n"	\
		" .align 4\n"			\
		" .long 661f\n" /* address */	\
		".previous\n"			\
		"661:\n\tlock; "

#else /* ! CONFIG_SMP */
#define alternative_smp(smpinstr, upinstr, args...) \
	asm volatile (upinstr : args)
#define LOCK_PREFIX ""
#endif

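/*
 * Usage sketch (modeled loosely on the i386 atomic operations; atomic_t
 * and v are assumed from <asm/atomic.h>): LOCK_PREFIX is pasted in front
 * of an instruction inside an asm string.  On SMP kernels the address of
 * the lock prefix is recorded in .smp_locks so it can be patched out when
 * only one CPU is present; on UP kernels it expands to nothing.
 *
 *	__asm__ __volatile__(
 *		LOCK_PREFIX "incl %0"
 *		: "+m" (v->counter));
 */
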
#endif /* _I386_ALTERNATIVE_H */