blob: df21ea0493694a43db95b28f760263d6595f222d [file] [log] [blame]
Linus Torvalds1da177e2005-04-16 15:20:36 -07001#ifndef __ASM_MSR_H
2#define __ASM_MSR_H
3
H. Peter Anvin4bc5aa92007-05-02 19:27:12 +02004#include <asm/msr-index.h>
5
6#ifdef __KERNEL__
7#ifndef __ASSEMBLY__
8
Rusty Russell90a0a062007-05-02 19:27:10 +02009#include <asm/errno.h>
10
11static inline unsigned long long native_read_msr(unsigned int msr)
12{
13 unsigned long long val;
14
15 asm volatile("rdmsr" : "=A" (val) : "c" (msr));
16 return val;
17}
18
19static inline unsigned long long native_read_msr_safe(unsigned int msr,
20 int *err)
21{
22 unsigned long long val;
23
24 asm volatile("2: rdmsr ; xorl %0,%0\n"
25 "1:\n\t"
26 ".section .fixup,\"ax\"\n\t"
27 "3: movl %3,%0 ; jmp 1b\n\t"
28 ".previous\n\t"
29 ".section __ex_table,\"a\"\n"
30 " .align 4\n\t"
31 " .long 2b,3b\n\t"
32 ".previous"
33 : "=r" (*err), "=A" (val)
34 : "c" (msr), "i" (-EFAULT));
35
36 return val;
37}
38
39static inline void native_write_msr(unsigned int msr, unsigned long long val)
40{
41 asm volatile("wrmsr" : : "c" (msr), "A"(val));
42}
43
44static inline int native_write_msr_safe(unsigned int msr,
45 unsigned long long val)
46{
47 int err;
48 asm volatile("2: wrmsr ; xorl %0,%0\n"
49 "1:\n\t"
50 ".section .fixup,\"ax\"\n\t"
51 "3: movl %4,%0 ; jmp 1b\n\t"
52 ".previous\n\t"
53 ".section __ex_table,\"a\"\n"
54 " .align 4\n\t"
55 " .long 2b,3b\n\t"
56 ".previous"
57 : "=a" (err)
58 : "c" (msr), "0" ((u32)val), "d" ((u32)(val>>32)),
59 "i" (-EFAULT));
60 return err;
61}
62
63static inline unsigned long long native_read_tsc(void)
64{
65 unsigned long long val;
66 asm volatile("rdtsc" : "=A" (val));
67 return val;
68}
69
70static inline unsigned long long native_read_pmc(void)
71{
72 unsigned long long val;
73 asm volatile("rdpmc" : "=A" (val));
74 return val;
75}
76
Rusty Russelld3561b72006-12-07 02:14:07 +010077#ifdef CONFIG_PARAVIRT
78#include <asm/paravirt.h>
79#else
Rudolf Marek4e9baad2007-05-08 17:22:01 +020080#include <linux/errno.h>
/*
 * Access to machine-specific registers (available on 586 and better only)
 * Note: the rd* operations modify the parameters directly (without using
 * pointer indirection), this allows gcc to optimize better
 */

/* Read MSR 'msr', splitting the 64-bit value into val1 (low) / val2 (high). */
#define rdmsr(msr,val1,val2)					\
	do {							\
		u64 __val = native_read_msr(msr);		\
		(val1) = (u32)__val;				\
		(val2) = (u32)(__val >> 32);			\
	} while(0)
Linus Torvalds1da177e2005-04-16 15:20:36 -070093
H. Peter Anvinb0b73cb2007-05-09 00:02:11 -070094static inline void wrmsr(u32 __msr, u32 __low, u32 __high)
Linus Torvalds1da177e2005-04-16 15:20:36 -070095{
H. Peter Anvinb0b73cb2007-05-09 00:02:11 -070096 native_write_msr(__msr, ((u64)__high << 32) | __low);
Linus Torvalds1da177e2005-04-16 15:20:36 -070097}
98
/* Read MSR 'msr' into a single 64-bit lvalue 'val'. */
#define rdmsrl(msr,val)						\
	((val) = native_read_msr(msr))

/* Write MSR 'msr' from a single 64-bit value 'val'. */
#define wrmsrl(msr,val) native_write_msr(msr, val)
103
Linus Torvalds1da177e2005-04-16 15:20:36 -0700104/* wrmsr with exception handling */
H. Peter Anvinb0b73cb2007-05-09 00:02:11 -0700105static inline int wrmsr_safe(u32 __msr, u32 __low, u32 __high)
106{
107 return native_write_msr_safe(__msr, ((u64)__high << 32) | __low);
108}
Linus Torvalds1da177e2005-04-16 15:20:36 -0700109
/*
 * rdmsr with exception handling.  Unlike rdmsr(), p1/p2 are POINTERS to
 * the low/high destinations.  Evaluates to 0 on success, -EFAULT on #GP
 * (statement-expression result).
 */
#define rdmsr_safe(msr,p1,p2)					\
	({							\
		int __err;					\
		u64 __val = native_read_msr_safe(msr, &__err);	\
		(*p1) = (u32)__val;				\
		(*p2) = (u32)(__val >> 32);			\
		__err;						\
	})
Zachary Amsdenf2ab4462005-09-03 15:56:42 -0700119
/* Read only the low 32 bits of the TSC into 'low'. */
#define rdtscl(low)						\
	((low) = (u32)native_read_tsc())

/* Read the full 64-bit TSC into 'val'. */
#define rdtscll(val)						\
	((val) = native_read_tsc())

/* MSR 0x10 is IA32_TIME_STAMP_COUNTER: writing it sets the TSC. */
#define write_tsc(val1,val2) wrmsr(0x10, val1, val2)
127
/*
 * Read a performance counter into low/high.  NOTE(review): the 'counter'
 * argument is never passed to native_read_pmc(), which does not load
 * %ecx itself — verify against the paravirt variant before relying on
 * counter selection here.
 */
#define rdpmc(counter,low,high)					\
	do {							\
		u64 _l = native_read_pmc();			\
		(low) = (u32)_l;				\
		(high) = (u32)(_l >> 32);			\
	} while(0)
Rusty Russelld3561b72006-12-07 02:14:07 +0100134#endif /* !CONFIG_PARAVIRT */
Linus Torvalds1da177e2005-04-16 15:20:36 -0700135
Adrian Bunkb44755c2007-02-20 01:07:13 +0100136#ifdef CONFIG_SMP
Alexey Dobriyanb077ffb2007-02-16 01:48:11 -0800137void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
138void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
Rudolf Marek4e9baad2007-05-08 17:22:01 +0200139int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
140int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
Adrian Bunkb44755c2007-02-20 01:07:13 +0100141#else /* CONFIG_SMP */
142static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
143{
144 rdmsr(msr_no, *l, *h);
145}
146static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
147{
148 wrmsr(msr_no, l, h);
149}
Rudolf Marek4e9baad2007-05-08 17:22:01 +0200150static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
151{
152 return rdmsr_safe(msr_no, l, h);
153}
154static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
155{
156 return wrmsr_safe(msr_no, l, h);
157}
Adrian Bunkb44755c2007-02-20 01:07:13 +0100158#endif /* CONFIG_SMP */
H. Peter Anvin4bc5aa92007-05-02 19:27:12 +0200159#endif
160#endif
Linus Torvalds1da177e2005-04-16 15:20:36 -0700161#endif /* __ASM_MSR_H */