#ifndef X86_64_MSR_H
#define X86_64_MSR_H 1

#ifndef __ASSEMBLY__
/*
 * Access to machine-specific registers (available on 586 and better only).
 * Note: the rd* operations modify their parameters directly (without using
 * pointer indirection); this allows gcc to optimize better.
 */

#define rdmsr(msr,val1,val2) \
	__asm__ __volatile__("rdmsr" \
			     : "=a" (val1), "=d" (val2) \
			     : "c" (msr))


#define rdmsrl(msr,val) do { unsigned long a__,b__; \
	__asm__ __volatile__("rdmsr" \
			     : "=a" (a__), "=d" (b__) \
			     : "c" (msr)); \
	val = a__ | (b__<<32); \
} while(0)

#define wrmsr(msr,val1,val2) \
	__asm__ __volatile__("wrmsr" \
			     : /* no outputs */ \
			     : "c" (msr), "a" (val1), "d" (val2))

#define wrmsrl(msr,val) wrmsr(msr,(__u32)((__u64)(val)),((__u64)(val))>>32)
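
/*
 * Illustrative sketch (ours, not part of the original header, compiled out
 * with #if 0): rdmsr() returns the two 32-bit halves of an MSR separately
 * in EAX/EDX, while rdmsrl() assembles them into one 64-bit value and
 * wrmsrl() does the reverse split on writes.  0x10 (MSR_IA32_TSC, whose
 * symbolic name is only defined further down) is used as the example MSR.
 */
#if 0	/* illustrative example, not built */
static inline void example_msr_access(void)
{
	unsigned int lo, hi;
	unsigned long val;

	rdmsr(0x10, lo, hi);	/* low half in EAX, high half in EDX */
	rdmsrl(0x10, val);	/* same MSR read as a single 64-bit value */
	wrmsrl(0x10, val);	/* wrmsrl() splits val back into EAX/EDX */
}
#endif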

/* wrmsr with exception handling */
#define wrmsr_safe(msr,a,b) ({ int ret__;			\
	asm volatile("2: wrmsr ; xorl %0,%0\n"			\
		     "1:\n\t"					\
		     ".section .fixup,\"ax\"\n\t"		\
		     "3: movl %4,%0 ; jmp 1b\n\t"		\
		     ".previous\n\t"				\
		     ".section __ex_table,\"a\"\n"		\
		     " .align 8\n\t"				\
		     " .quad 2b,3b\n\t"				\
		     ".previous"				\
		     : "=a" (ret__)				\
		     : "c" (msr), "0" (a), "d" (b), "i" (-EFAULT)); \
	ret__; })

#define checking_wrmsrl(msr,val) wrmsr_safe(msr,(u32)(val),(u32)((val)>>32))

#define rdmsr_safe(msr,a,b) \
	({ int ret__;						\
	   asm volatile ("1: rdmsr\n"				\
			 "2:\n"					\
			 ".section .fixup,\"ax\"\n"		\
			 "3: movl %4,%0\n"			\
			 " jmp 2b\n"				\
			 ".previous\n"				\
			 ".section __ex_table,\"a\"\n"		\
			 " .align 8\n"				\
			 " .quad 1b,3b\n"			\
			 ".previous":"=&bDS" (ret__), "=a"(*(a)), "=d"(*(b))\
			 :"c"(msr), "i"(-EIO), "0"(0));		\
	   ret__; })
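
/*
 * Illustrative sketch (ours, compiled out with #if 0): the *_safe variants
 * return 0 on success and a negative errno (-EFAULT for wrmsr_safe, -EIO
 * for rdmsr_safe above) if the access faults, e.g. because the MSR does
 * not exist on this CPU.  The wrapper below is a hypothetical helper, not
 * a kernel API.
 */
#if 0	/* illustrative example, not built */
static inline int example_probe_msr(unsigned int msr, unsigned long *val)
{
	unsigned int lo, hi;
	int err;

	err = rdmsr_safe(msr, &lo, &hi);
	if (err)
		return err;	/* MSR not readable on this CPU */
	*val = lo | ((unsigned long)hi << 32);
	return 0;
}
#endif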

#define rdtsc(low,high) \
	__asm__ __volatile__("rdtsc" : "=a" (low), "=d" (high))

#define rdtscl(low) \
	__asm__ __volatile__ ("rdtsc" : "=a" (low) : : "edx")

#define rdtscp(low,high,aux) \
	asm volatile (".byte 0x0f,0x01,0xf9" : "=a" (low), "=d" (high), "=c" (aux))

#define rdtscll(val) do { \
	unsigned int __a,__d; \
	asm volatile("rdtsc" : "=a" (__a), "=d" (__d)); \
	(val) = ((unsigned long)__a) | (((unsigned long)__d)<<32); \
} while(0)

#define rdtscpll(val, aux) do { \
	unsigned long __a, __d; \
	asm volatile (".byte 0x0f,0x01,0xf9" : "=a" (__a), "=d" (__d), "=c" (aux)); \
	(val) = (__d << 32) | __a; \
} while (0)
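
/*
 * Illustrative sketch (ours, compiled out with #if 0): using rdtscll() to
 * measure elapsed TSC cycles around a code region.  rdtscpll() would do the
 * same via the RDTSCP instruction and additionally return the TSC_AUX MSR
 * contents in "aux".  Note that plain RDTSC is not a serializing
 * instruction, so this is only a rough measurement.
 */
#if 0	/* illustrative example, not built */
static inline unsigned long example_measure_cycles(void (*fn)(void))
{
	unsigned long start, end;

	rdtscll(start);
	fn();
	rdtscll(end);
	return end - start;	/* elapsed TSC ticks */
}
#endif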

#define write_tsc(val1,val2) wrmsr(0x10, val1, val2)

#define write_rdtscp_aux(val) wrmsr(0xc0000103, val, 0)

#define rdpmc(counter,low,high) \
	__asm__ __volatile__("rdpmc" \
			     : "=a" (low), "=d" (high) \
			     : "c" (counter))

static inline void cpuid(int op, unsigned int *eax, unsigned int *ebx,
			 unsigned int *ecx, unsigned int *edx)
{
	__asm__("cpuid"
		: "=a" (*eax),
		  "=b" (*ebx),
		  "=c" (*ecx),
		  "=d" (*edx)
		: "0" (op));
}

/* Some CPUID calls want 'count' to be placed in ecx */
static inline void cpuid_count(int op, int count, int *eax, int *ebx, int *ecx,
			       int *edx)
{
	__asm__("cpuid"
		: "=a" (*eax),
		  "=b" (*ebx),
		  "=c" (*ecx),
		  "=d" (*edx)
		: "0" (op), "c" (count));
}
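
/*
 * Illustrative sketch (ours, compiled out with #if 0): cpuid(0, ...) returns
 * the maximum standard leaf in EAX and the vendor string in EBX/EDX/ECX,
 * while cpuid_count() is for leaves such as 4 (deterministic cache
 * parameters) that take a sub-leaf index in ECX.
 */
#if 0	/* illustrative example, not built */
static inline void example_cpuid(void)
{
	unsigned int eax, ebx, ecx, edx;
	int a, b, c, d;

	cpuid(0, &eax, &ebx, &ecx, &edx);	/* eax = max leaf; ebx/edx/ecx = vendor string */
	cpuid_count(4, 0, &a, &b, &c, &d);	/* sub-leaf 0 of the cache-parameters leaf */
}
#endif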

/*
 * CPUID functions returning a single datum
 */
static inline unsigned int cpuid_eax(unsigned int op)
{
	unsigned int eax;

	__asm__("cpuid"
		: "=a" (eax)
		: "0" (op)
		: "bx", "cx", "dx");
	return eax;
}

static inline unsigned int cpuid_ebx(unsigned int op)
{
	unsigned int eax, ebx;

	__asm__("cpuid"
		: "=a" (eax), "=b" (ebx)
		: "0" (op)
		: "cx", "dx");
	return ebx;
}

static inline unsigned int cpuid_ecx(unsigned int op)
{
	unsigned int eax, ecx;

	__asm__("cpuid"
		: "=a" (eax), "=c" (ecx)
		: "0" (op)
		: "bx", "dx");
	return ecx;
}

static inline unsigned int cpuid_edx(unsigned int op)
{
	unsigned int eax, edx;

	__asm__("cpuid"
		: "=a" (eax), "=d" (edx)
		: "0" (op)
		: "bx", "cx");
	return edx;
}
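
/*
 * Illustrative sketch (ours, compiled out with #if 0): the single-datum
 * helpers are convenient for feature tests.  For example, bit 20 of EDX of
 * extended leaf 0x80000001 advertises NX (no-execute) support.
 */
#if 0	/* illustrative example, not built */
static inline int example_cpu_has_nx(void)
{
	return (cpuid_edx(0x80000001) >> 20) & 1;
}
#endif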

#define MSR_IA32_UCODE_WRITE		0x79
#define MSR_IA32_UCODE_REV		0x8b


#endif /* !__ASSEMBLY__ */

/* AMD/K8 specific MSRs */
#define MSR_EFER		0xc0000080	/* extended feature register */
#define MSR_STAR		0xc0000081	/* legacy mode SYSCALL target */
#define MSR_LSTAR		0xc0000082	/* long mode SYSCALL target */
#define MSR_CSTAR		0xc0000083	/* compatibility mode SYSCALL target */
#define MSR_SYSCALL_MASK	0xc0000084	/* EFLAGS mask for syscall */
#define MSR_FS_BASE		0xc0000100	/* 64bit FS base */
#define MSR_GS_BASE		0xc0000101	/* 64bit GS base */
#define MSR_KERNEL_GS_BASE	0xc0000102	/* SwapGS GS shadow (or USER_GS from kernel) */
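
/*
 * Illustrative sketch (ours, compiled out with #if 0): during CPU setup the
 * kernel points MSR_LSTAR at its 64-bit SYSCALL entry point and programs the
 * EFLAGS clear-mask in MSR_SYSCALL_MASK.  "my_syscall_entry" and the mask
 * value below are placeholders, not names from this header or the kernel.
 */
#if 0	/* illustrative example, not built */
extern void my_syscall_entry(void);

static inline void example_setup_syscall_msrs(void)
{
	wrmsrl(MSR_LSTAR, (unsigned long)my_syscall_entry);
	wrmsrl(MSR_SYSCALL_MASK, 0x700);	/* example mask: clear TF/IF/DF on entry */
}
#endif
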
/* EFER bits: */
#define _EFER_SCE	0	/* SYSCALL/SYSRET */
#define _EFER_LME	8	/* Long mode enable */
#define _EFER_LMA	10	/* Long mode active (read-only) */
#define _EFER_NX	11	/* No execute enable */

#define EFER_SCE	(1<<_EFER_SCE)
#define EFER_LME	(1<<_EFER_LME)
#define EFER_LMA	(1<<_EFER_LMA)
#define EFER_NX		(1<<_EFER_NX)
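
/*
 * Illustrative sketch (ours, compiled out with #if 0): checking EFER bits.
 * On a 64-bit kernel EFER_LMA is expected to be set; EFER_NX indicates that
 * no-execute page protection is enabled.
 */
#if 0	/* illustrative example, not built */
static inline int example_nx_enabled(void)
{
	unsigned long efer;

	rdmsrl(MSR_EFER, efer);
	return (efer & EFER_NX) != 0;
}
#endif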

/* Intel MSRs. Some also available on other CPUs */
#define MSR_IA32_TSC		0x10
#define MSR_IA32_PLATFORM_ID	0x17

#define MSR_IA32_PERFCTR0	0xc1
#define MSR_IA32_PERFCTR1	0xc2

#define MSR_MTRRcap		0x0fe
#define MSR_IA32_BBL_CR_CTL	0x119

#define MSR_IA32_SYSENTER_CS	0x174
#define MSR_IA32_SYSENTER_ESP	0x175
#define MSR_IA32_SYSENTER_EIP	0x176

#define MSR_IA32_MCG_CAP	0x179
#define MSR_IA32_MCG_STATUS	0x17a
#define MSR_IA32_MCG_CTL	0x17b

#define MSR_IA32_EVNTSEL0	0x186
#define MSR_IA32_EVNTSEL1	0x187

#define MSR_IA32_DEBUGCTLMSR		0x1d9
#define MSR_IA32_LASTBRANCHFROMIP	0x1db
#define MSR_IA32_LASTBRANCHTOIP		0x1dc
#define MSR_IA32_LASTINTFROMIP		0x1dd
#define MSR_IA32_LASTINTTOIP		0x1de

#define MSR_IA32_PEBS_ENABLE		0x3f1
#define MSR_IA32_DS_AREA		0x600
#define MSR_IA32_PERF_CAPABILITIES	0x345

#define MSR_MTRRfix64K_00000	0x250
#define MSR_MTRRfix16K_80000	0x258
#define MSR_MTRRfix16K_A0000	0x259
#define MSR_MTRRfix4K_C0000	0x268
#define MSR_MTRRfix4K_C8000	0x269
#define MSR_MTRRfix4K_D0000	0x26a
#define MSR_MTRRfix4K_D8000	0x26b
#define MSR_MTRRfix4K_E0000	0x26c
#define MSR_MTRRfix4K_E8000	0x26d
#define MSR_MTRRfix4K_F0000	0x26e
#define MSR_MTRRfix4K_F8000	0x26f
#define MSR_MTRRdefType		0x2ff

#define MSR_IA32_MC0_CTL	0x400
#define MSR_IA32_MC0_STATUS	0x401
#define MSR_IA32_MC0_ADDR	0x402
#define MSR_IA32_MC0_MISC	0x403

#define MSR_P6_PERFCTR0		0xc1
#define MSR_P6_PERFCTR1		0xc2
#define MSR_P6_EVNTSEL0		0x186
#define MSR_P6_EVNTSEL1		0x187

/* K7/K8 MSRs. Not complete. See the architecture manual for a more complete list. */
#define MSR_K7_EVNTSEL0		0xC0010000
#define MSR_K7_PERFCTR0		0xC0010004
#define MSR_K7_EVNTSEL1		0xC0010001
#define MSR_K7_PERFCTR1		0xC0010005
#define MSR_K7_EVNTSEL2		0xC0010002
#define MSR_K7_PERFCTR2		0xC0010006
#define MSR_K7_EVNTSEL3		0xC0010003
#define MSR_K7_PERFCTR3		0xC0010007
#define MSR_K8_TOP_MEM1		0xC001001A
#define MSR_K8_TOP_MEM2		0xC001001D
#define MSR_K8_SYSCFG		0xC0010010
#define MSR_K8_HWCR		0xC0010015

/* K6 MSRs */
#define MSR_K6_EFER		0xC0000080
#define MSR_K6_STAR		0xC0000081
#define MSR_K6_WHCR		0xC0000082
#define MSR_K6_UWCCR		0xC0000085
#define MSR_K6_PSOR		0xC0000087
#define MSR_K6_PFIR		0xC0000088

/* Centaur-Hauls/IDT defined MSRs. */
#define MSR_IDT_FCR1	0x107
#define MSR_IDT_FCR2	0x108
#define MSR_IDT_FCR3	0x109
#define MSR_IDT_FCR4	0x10a

#define MSR_IDT_MCR0	0x110
#define MSR_IDT_MCR1	0x111
#define MSR_IDT_MCR2	0x112
#define MSR_IDT_MCR3	0x113
#define MSR_IDT_MCR4	0x114
#define MSR_IDT_MCR5	0x115
#define MSR_IDT_MCR6	0x116
#define MSR_IDT_MCR7	0x117
#define MSR_IDT_MCR_CTRL	0x120

/* VIA Cyrix defined MSRs */
#define MSR_VIA_FCR		0x1107
#define MSR_VIA_LONGHAUL	0x110a
#define MSR_VIA_RNG		0x110b
#define MSR_VIA_BCR2		0x1147

/* Intel defined MSRs. */
#define MSR_IA32_P5_MC_ADDR	0
#define MSR_IA32_P5_MC_TYPE	1
#define MSR_IA32_PLATFORM_ID	0x17
#define MSR_IA32_EBL_CR_POWERON	0x2a

#define MSR_IA32_APICBASE		0x1b
#define MSR_IA32_APICBASE_BSP		(1<<8)
#define MSR_IA32_APICBASE_ENABLE	(1<<11)
#define MSR_IA32_APICBASE_BASE		(0xfffff<<12)
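
/*
 * Illustrative sketch (ours, compiled out with #if 0): decoding
 * MSR_IA32_APICBASE: the BSP flag marks the bootstrap processor, the enable
 * bit switches the local APIC on, and the base field gives the physical
 * address of the APIC register page.
 */
#if 0	/* illustrative example, not built */
static inline unsigned long example_apic_phys_base(void)
{
	unsigned long apicbase;

	rdmsrl(MSR_IA32_APICBASE, apicbase);
	if (!(apicbase & MSR_IA32_APICBASE_ENABLE))
		return 0;	/* local APIC disabled */
	return apicbase & MSR_IA32_APICBASE_BASE;
}
#endif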

/* P4/Xeon+ specific */
#define MSR_IA32_MCG_EAX	0x180
#define MSR_IA32_MCG_EBX	0x181
#define MSR_IA32_MCG_ECX	0x182
#define MSR_IA32_MCG_EDX	0x183
#define MSR_IA32_MCG_ESI	0x184
#define MSR_IA32_MCG_EDI	0x185
#define MSR_IA32_MCG_EBP	0x186
#define MSR_IA32_MCG_ESP	0x187
#define MSR_IA32_MCG_EFLAGS	0x188
#define MSR_IA32_MCG_EIP	0x189
#define MSR_IA32_MCG_RESERVED	0x18A

#define MSR_P6_EVNTSEL0		0x186
#define MSR_P6_EVNTSEL1		0x187

#define MSR_IA32_PERF_STATUS	0x198
#define MSR_IA32_PERF_CTL	0x199

#define MSR_IA32_THERM_CONTROL		0x19a
#define MSR_IA32_THERM_INTERRUPT	0x19b
#define MSR_IA32_THERM_STATUS		0x19c
#define MSR_IA32_MISC_ENABLE		0x1a0

#define MSR_IA32_DEBUGCTLMSR		0x1d9
#define MSR_IA32_LASTBRANCHFROMIP	0x1db
#define MSR_IA32_LASTBRANCHTOIP		0x1dc
#define MSR_IA32_LASTINTFROMIP		0x1dd
#define MSR_IA32_LASTINTTOIP		0x1de

#define MSR_IA32_MC0_CTL	0x400
#define MSR_IA32_MC0_STATUS	0x401
#define MSR_IA32_MC0_ADDR	0x402
#define MSR_IA32_MC0_MISC	0x403

/* Pentium IV performance counter MSRs */
#define MSR_P4_BPU_PERFCTR0		0x300
#define MSR_P4_BPU_PERFCTR1		0x301
#define MSR_P4_BPU_PERFCTR2		0x302
#define MSR_P4_BPU_PERFCTR3		0x303
#define MSR_P4_MS_PERFCTR0		0x304
#define MSR_P4_MS_PERFCTR1		0x305
#define MSR_P4_MS_PERFCTR2		0x306
#define MSR_P4_MS_PERFCTR3		0x307
#define MSR_P4_FLAME_PERFCTR0		0x308
#define MSR_P4_FLAME_PERFCTR1		0x309
#define MSR_P4_FLAME_PERFCTR2		0x30a
#define MSR_P4_FLAME_PERFCTR3		0x30b
#define MSR_P4_IQ_PERFCTR0		0x30c
#define MSR_P4_IQ_PERFCTR1		0x30d
#define MSR_P4_IQ_PERFCTR2		0x30e
#define MSR_P4_IQ_PERFCTR3		0x30f
#define MSR_P4_IQ_PERFCTR4		0x310
#define MSR_P4_IQ_PERFCTR5		0x311
#define MSR_P4_BPU_CCCR0		0x360
#define MSR_P4_BPU_CCCR1		0x361
#define MSR_P4_BPU_CCCR2		0x362
#define MSR_P4_BPU_CCCR3		0x363
#define MSR_P4_MS_CCCR0			0x364
#define MSR_P4_MS_CCCR1			0x365
#define MSR_P4_MS_CCCR2			0x366
#define MSR_P4_MS_CCCR3			0x367
#define MSR_P4_FLAME_CCCR0		0x368
#define MSR_P4_FLAME_CCCR1		0x369
#define MSR_P4_FLAME_CCCR2		0x36a
#define MSR_P4_FLAME_CCCR3		0x36b
#define MSR_P4_IQ_CCCR0			0x36c
#define MSR_P4_IQ_CCCR1			0x36d
#define MSR_P4_IQ_CCCR2			0x36e
#define MSR_P4_IQ_CCCR3			0x36f
#define MSR_P4_IQ_CCCR4			0x370
#define MSR_P4_IQ_CCCR5			0x371
#define MSR_P4_ALF_ESCR0		0x3ca
#define MSR_P4_ALF_ESCR1		0x3cb
#define MSR_P4_BPU_ESCR0		0x3b2
#define MSR_P4_BPU_ESCR1		0x3b3
#define MSR_P4_BSU_ESCR0		0x3a0
#define MSR_P4_BSU_ESCR1		0x3a1
#define MSR_P4_CRU_ESCR0		0x3b8
#define MSR_P4_CRU_ESCR1		0x3b9
#define MSR_P4_CRU_ESCR2		0x3cc
#define MSR_P4_CRU_ESCR3		0x3cd
#define MSR_P4_CRU_ESCR4		0x3e0
#define MSR_P4_CRU_ESCR5		0x3e1
#define MSR_P4_DAC_ESCR0		0x3a8
#define MSR_P4_DAC_ESCR1		0x3a9
#define MSR_P4_FIRM_ESCR0		0x3a4
#define MSR_P4_FIRM_ESCR1		0x3a5
#define MSR_P4_FLAME_ESCR0		0x3a6
#define MSR_P4_FLAME_ESCR1		0x3a7
#define MSR_P4_FSB_ESCR0		0x3a2
#define MSR_P4_FSB_ESCR1		0x3a3
#define MSR_P4_IQ_ESCR0			0x3ba
#define MSR_P4_IQ_ESCR1			0x3bb
#define MSR_P4_IS_ESCR0			0x3b4
#define MSR_P4_IS_ESCR1			0x3b5
#define MSR_P4_ITLB_ESCR0		0x3b6
#define MSR_P4_ITLB_ESCR1		0x3b7
#define MSR_P4_IX_ESCR0			0x3c8
#define MSR_P4_IX_ESCR1			0x3c9
#define MSR_P4_MOB_ESCR0		0x3aa
#define MSR_P4_MOB_ESCR1		0x3ab
#define MSR_P4_MS_ESCR0			0x3c0
#define MSR_P4_MS_ESCR1			0x3c1
#define MSR_P4_PMH_ESCR0		0x3ac
#define MSR_P4_PMH_ESCR1		0x3ad
#define MSR_P4_RAT_ESCR0		0x3bc
#define MSR_P4_RAT_ESCR1		0x3bd
#define MSR_P4_SAAT_ESCR0		0x3ae
#define MSR_P4_SAAT_ESCR1		0x3af
#define MSR_P4_SSU_ESCR0		0x3be
#define MSR_P4_SSU_ESCR1		0x3bf	/* guess: not defined in manual */
#define MSR_P4_TBPU_ESCR0		0x3c2
#define MSR_P4_TBPU_ESCR1		0x3c3
#define MSR_P4_TC_ESCR0			0x3c4
#define MSR_P4_TC_ESCR1			0x3c5
#define MSR_P4_U2L_ESCR0		0x3b0
#define MSR_P4_U2L_ESCR1		0x3b1

/* Intel Core-based CPU performance counters */
#define MSR_CORE_PERF_FIXED_CTR0	0x309
#define MSR_CORE_PERF_FIXED_CTR1	0x30a
#define MSR_CORE_PERF_FIXED_CTR2	0x30b
#define MSR_CORE_PERF_FIXED_CTR_CTRL	0x38d
#define MSR_CORE_PERF_GLOBAL_STATUS	0x38e
#define MSR_CORE_PERF_GLOBAL_CTRL	0x38f
#define MSR_CORE_PERF_GLOBAL_OVF_CTRL	0x390

#endif /* X86_64_MSR_H */