H. Peter Anvin | 1965aae | 2008-10-22 22:26:29 -0700 | [diff] [blame] | 1 | #ifndef _ASM_X86_MICROCODE_H |
| 2 | #define _ASM_X86_MICROCODE_H |
Dmitry Adamushko | d45de40 | 2008-08-20 00:22:26 +0200 | [diff] [blame] | 3 | |
/*
 * Raw MSR accessors that bypass paravirt hooks: they call native_read_msr()/
 * native_write_msr() directly.  Needed by the early microcode loader, which
 * runs before the paravirt infrastructure is set up.
 */

/* Read MSR 'msr', splitting the 64-bit value into low (val1) / high (val2). */
#define native_rdmsr(msr, val1, val2)			\
do {							\
	u64 __val = native_read_msr((msr));		\
	(void)((val1) = (u32)__val);			\
	(void)((val2) = (u32)(__val >> 32));		\
} while (0)

/* Write MSR 'msr' from separate low/high 32-bit halves. */
#define native_wrmsr(msr, low, high)			\
	native_write_msr(msr, low, high)

/* Write MSR 'msr' from a single 64-bit value. */
#define native_wrmsrl(msr, val)				\
	native_write_msr((msr),				\
			 (u32)((u64)(val)),		\
			 (u32)((u64)(val) >> 32))
| 18 | |
/*
 * Identifies a CPU for microcode-matching purposes.  A microcode patch is
 * only valid for CPUs whose signature it matches.
 */
struct cpu_signature {
	unsigned int sig;	/* CPUID signature (family/model/stepping) */
	unsigned int pf;	/* processor flags -- TODO confirm: vendor-specific platform ID */
	unsigned int rev;	/* currently loaded microcode revision */
};
Peter Oruba | 8d86f39 | 2008-07-28 18:44:21 +0200 | [diff] [blame] | 24 | |
Dmitry Adamushko | a0a29b6 | 2008-09-11 23:27:52 +0200 | [diff] [blame] | 25 | struct device; |
Dmitry Adamushko | d45de40 | 2008-08-20 00:22:26 +0200 | [diff] [blame] | 26 | |
/* Result of a microcode request: error, successfully loaded, or no image found. */
enum ucode_state { UCODE_ERROR, UCODE_OK, UCODE_NFOUND };
| 28 | |
/*
 * Vendor-specific microcode loader operations.  One instance is provided by
 * each of the Intel and AMD drivers (init_intel_microcode() /
 * init_amd_microcode() below) and consumed by the generic core.
 */
struct microcode_ops {
	/* Load a microcode image for 'cpu' from a user-space buffer. */
	enum ucode_state (*request_microcode_user) (int cpu,
				const void __user *buf, size_t size);

	/*
	 * Load a microcode image for 'cpu' via the firmware loader.
	 * 'refresh_fw' requests re-fetching the image rather than
	 * reusing a cached one -- TODO confirm against vendor drivers.
	 */
	enum ucode_state (*request_microcode_fw) (int cpu, struct device *,
						  bool refresh_fw);

	/* Release per-CPU microcode state for 'cpu'. */
	void (*microcode_fini_cpu) (int cpu);

	/*
	 * The generic 'microcode_core' part guarantees that
	 * the callbacks below run on a target cpu when they
	 * are being called.
	 * See also the "Synchronization" section in microcode_core.c.
	 */
	/* Apply the already-loaded patch to 'cpu'; returns 0 on success. */
	int (*apply_microcode) (int cpu);

	/* Fill *csig with the signature of 'cpu'; returns 0 on success. */
	int (*collect_cpu_info) (int cpu, struct cpu_signature *csig);
};
| 47 | |
/* Per-CPU microcode state maintained by the generic loader core. */
struct ucode_cpu_info {
	struct cpu_signature	cpu_sig;	/* this CPU's signature */
	int			valid;		/* non-zero once cpu_sig/mc are usable */
	void			*mc;		/* vendor-specific microcode image, NULL if none */
};
/* One entry per possible CPU; defined in the microcode core. */
extern struct ucode_cpu_info ucode_cpu_info[];
| 54 | |
#ifdef CONFIG_MICROCODE_INTEL
/* Returns the Intel microcode_ops, or NULL if this CPU is unsupported. */
extern struct microcode_ops * __init init_intel_microcode(void);
#else
/* Stub when the Intel loader is not built in: no ops available. */
static inline struct microcode_ops * __init init_intel_microcode(void)
{
	return NULL;
}
#endif /* CONFIG_MICROCODE_INTEL */
| 63 | |
| 64 | #ifdef CONFIG_MICROCODE_AMD |
| 65 | extern struct microcode_ops * __init init_amd_microcode(void); |
Borislav Petkov | f72c1a5 | 2011-12-02 16:50:04 +0100 | [diff] [blame] | 66 | extern void __exit exit_amd_microcode(void); |
Dmitry Adamushko | 18dbc91 | 2008-09-23 12:08:44 +0200 | [diff] [blame] | 67 | #else |
| 68 | static inline struct microcode_ops * __init init_amd_microcode(void) |
| 69 | { |
| 70 | return NULL; |
| 71 | } |
Borislav Petkov | f72c1a5 | 2011-12-02 16:50:04 +0100 | [diff] [blame] | 72 | static inline void __exit exit_amd_microcode(void) {} |
Dmitry Adamushko | 18dbc91 | 2008-09-23 12:08:44 +0200 | [diff] [blame] | 73 | #endif |
| 74 | |
#ifdef CONFIG_MICROCODE_EARLY
/* Upper bound on early-boot microcode patches kept around. */
#define MAX_UCODE_COUNT 128
/* Apply microcode on the boot CPU, very early during boot. */
extern void __init load_ucode_bsp(void);
/* Apply microcode on a secondary (application) processor as it comes up. */
extern void load_ucode_ap(void);
/*
 * Preserve the microcode found in the initrd so it survives after the
 * initrd memory is freed; returns 0 on success.
 */
extern int __init save_microcode_in_initrd(void);
#else
/* No-op stubs when early microcode loading is not configured. */
static inline void __init load_ucode_bsp(void) {}
static inline void load_ucode_ap(void) {}
static inline int __init save_microcode_in_initrd(void)
{
	return 0;
}
#endif
| 88 | |
H. Peter Anvin | 1965aae | 2008-10-22 22:26:29 -0700 | [diff] [blame] | 89 | #endif /* _ASM_X86_MICROCODE_H */ |