#ifndef _X86_64_ALTERNATIVE_H
#define _X86_64_ALTERNATIVE_H

#ifdef __KERNEL__

#include <linux/types.h>
#include <linux/stddef.h>
#include <asm/cpufeature.h>

struct alt_instr {
	u8 *instr;		/* original instruction */
	u8 *replacement;	/* replacement instruction(s) */
	u8  cpuid;		/* cpuid bit set for replacement */
	u8  instrlen;		/* length of original instruction */
	u8  replacementlen;	/* length of new instruction, <= instrlen */
	u8  pad[5];		/* pad the entry to 24 bytes */
};

extern void apply_alternatives(struct alt_instr *start, struct alt_instr *end);

struct module;

#ifdef CONFIG_SMP
extern void alternatives_smp_module_add(struct module *mod, char *name,
					void *locks, void *locks_end,
					void *text, void *text_end);
extern void alternatives_smp_module_del(struct module *mod);
extern void alternatives_smp_switch(int smp);
#else
static inline void alternatives_smp_module_add(struct module *mod, char *name,
					       void *locks, void *locks_end,
					       void *text, void *text_end) {}
static inline void alternatives_smp_module_del(struct module *mod) {}
static inline void alternatives_smp_switch(int smp) {}
#endif

#endif /* __KERNEL__ */

/*
 * Alternative instructions for different CPU types or capabilities.
 *
 * This allows optimized instructions to be used even in generic
 * binary kernels.
 *
 * The length of oldinstr must be greater than or equal to the length
 * of newinstr; oldinstr can be padded with nops as needed.
 *
 * For non-barrier-like inlines please define new variants
 * without volatile and the memory clobber.
 */
#define alternative(oldinstr, newinstr, feature)			\
	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
		      ".section .altinstructions,\"a\"\n"		\
		      " .align 8\n"					\
		      " .quad 661b\n"		/* label */		\
		      " .quad 663f\n"		/* new instruction */	\
		      " .byte %c0\n"		/* feature bit */	\
		      " .byte 662b-661b\n"	/* sourcelen */		\
		      " .byte 664f-663f\n"	/* replacementlen */	\
		      ".previous\n"					\
		      ".section .altinstr_replacement,\"ax\"\n"		\
		      "663:\n\t" newinstr "\n664:\n"	/* replacement */ \
		      ".previous" :: "i" (feature) : "memory")

/*
 * Alternative inline assembly with input.
 *
 * Peculiarities:
 * No memory clobber here.
 * Argument numbers start with 1.
 * It is best to use constraints that are fixed size (like "r" with (%1)).
 * If you use variable-sized constraints like "m" or "g" in the
 * replacement, make sure to pad to the worst-case length.
 */
#define alternative_input(oldinstr, newinstr, feature, input...)	\
	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
		      ".section .altinstructions,\"a\"\n"		\
		      " .align 8\n"					\
		      " .quad 661b\n"		/* label */		\
		      " .quad 663f\n"		/* new instruction */	\
		      " .byte %c0\n"		/* feature bit */	\
		      " .byte 662b-661b\n"	/* sourcelen */		\
		      " .byte 664f-663f\n"	/* replacementlen */	\
		      ".previous\n"					\
		      ".section .altinstr_replacement,\"ax\"\n"		\
		      "663:\n\t" newinstr "\n664:\n"	/* replacement */ \
		      ".previous" :: "i" (feature), ##input)

/* Like alternative_input, but with a single output argument */
#define alternative_io(oldinstr, newinstr, feature, output, input...)	\
	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
		      ".section .altinstructions,\"a\"\n"		\
		      " .align 8\n"					\
		      " .quad 661b\n"		/* label */		\
		      " .quad 663f\n"		/* new instruction */	\
		      " .byte %c[feat]\n"	/* feature bit */	\
		      " .byte 662b-661b\n"	/* sourcelen */		\
		      " .byte 664f-663f\n"	/* replacementlen */	\
		      ".previous\n"					\
		      ".section .altinstr_replacement,\"ax\"\n"		\
		      "663:\n\t" newinstr "\n664:\n"	/* replacement */ \
		      ".previous" : output : [feat] "i" (feature), ##input)

/*
 * Alternative inline assembly for SMP.
 *
 * The LOCK_PREFIX macro defined here replaces the LOCK and
 * LOCK_PREFIX macros used everywhere in the source tree.
 *
 * SMP alternatives use the same data structures as the other
 * alternatives and the X86_FEATURE_UP flag to indicate the case of a
 * UP system running an SMP kernel.  The existing apply_alternatives()
 * works fine for patching an SMP kernel for UP.
 *
 * The SMP alternative tables can be kept after boot and contain both
 * UP and SMP versions of the instructions to allow switching back to
 * SMP at runtime, when hotplugging in a new CPU, which is especially
 * useful in virtualized environments.
 *
 * The very common lock prefix is handled as a special case in a
 * separate table which is a pure address list without replacement
 * pointer and size information.  That keeps the table sizes small.
 */

#ifdef CONFIG_SMP
#define LOCK_PREFIX \
		".section .smp_locks,\"a\"\n"	\
		" .align 8\n"			\
		" .quad 661f\n" /* address */	\
		".previous\n"			\
		"661:\n\tlock; "

#else /* ! CONFIG_SMP */
#define LOCK_PREFIX ""
#endif
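
/*
 * Illustrative sketch only, modelled on the atomic_t helpers: on an SMP
 * build LOCK_PREFIX records the address of the LOCK byte in .smp_locks
 * so the prefix can be patched out when the kernel finds itself on a
 * uniprocessor machine; on a UP build it expands to nothing.  The
 * example_atomic_inc() name is hypothetical.
 */
static inline void example_atomic_inc(int *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (*v));
}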

struct paravirt_patch;
#ifdef CONFIG_PARAVIRT
void apply_paravirt(struct paravirt_patch *start, struct paravirt_patch *end);
#else
static inline void
apply_paravirt(struct paravirt_patch *start, struct paravirt_patch *end)
{}
#define __start_parainstructions NULL
#define __stop_parainstructions NULL
#endif

#endif /* _X86_64_ALTERNATIVE_H */