Andres Salomon | 2272b0e | 2007-03-06 01:42:05 -0800 | [diff] [blame] | 1 | /* |
Thomas Gleixner | 2f0798a | 2007-10-12 23:04:23 +0200 | [diff] [blame] | 2 | * x86 TSC related functions |
Andres Salomon | 2272b0e | 2007-03-06 01:42:05 -0800 | [diff] [blame] | 3 | */ |
H. Peter Anvin | 1965aae | 2008-10-22 22:26:29 -0700 | [diff] [blame] | 4 | #ifndef _ASM_X86_TSC_H |
| 5 | #define _ASM_X86_TSC_H |
Andres Salomon | 2272b0e | 2007-03-06 01:42:05 -0800 | [diff] [blame] | 6 | |
| 7 | #include <asm/processor.h> |
| 8 | |
Thomas Gleixner | 2f0798a | 2007-10-12 23:04:23 +0200 | [diff] [blame] | 9 | #define NS_SCALE 10 /* 2^10, carefully chosen */ |
 | 10 | #define US_SCALE 32 /* 2^32, arbitrarily chosen */ 
| 11 | |
Andres Salomon | 2272b0e | 2007-03-06 01:42:05 -0800 | [diff] [blame] | 12 | /* |
| 13 | * Standard way to access the cycle counter. |
| 14 | */ |
| 15 | typedef unsigned long long cycles_t; |
| 16 | |
| 17 | extern unsigned int cpu_khz; |
| 18 | extern unsigned int tsc_khz; |
Glauber de Oliveira Costa | 73018a6 | 2008-01-30 13:31:26 +0100 | [diff] [blame] | 19 | |
| 20 | extern void disable_TSC(void); |
Andres Salomon | 2272b0e | 2007-03-06 01:42:05 -0800 | [diff] [blame] | 21 | |
| 22 | static inline cycles_t get_cycles(void) |
| 23 | { |
| 24 | unsigned long long ret = 0; |
| 25 | |
| 26 | #ifndef CONFIG_X86_TSC |
| 27 | if (!cpu_has_tsc) |
| 28 | return 0; |
| 29 | #endif |
Andres Salomon | 2272b0e | 2007-03-06 01:42:05 -0800 | [diff] [blame] | 30 | rdtscll(ret); |
Ingo Molnar | 75f2ce0 | 2008-01-30 13:33:24 +0100 | [diff] [blame] | 31 | |
Andres Salomon | 2272b0e | 2007-03-06 01:42:05 -0800 | [diff] [blame] | 32 | return ret; |
| 33 | } |
| 34 | |
Hugh Dickins | 9752082 | 2008-04-27 00:39:36 +0100 | [diff] [blame] | 35 | static __always_inline cycles_t vget_cycles(void) |
Andres Salomon | 2272b0e | 2007-03-06 01:42:05 -0800 | [diff] [blame] | 36 | { |
Andres Salomon | 2272b0e | 2007-03-06 01:42:05 -0800 | [diff] [blame] | 37 | /* |
Andi Kleen | 6d63de8 | 2008-01-30 13:32:39 +0100 | [diff] [blame] | 38 | * We only do VDSOs on TSC capable CPUs, so this shouldnt |
| 39 | * access boot_cpu_data (which is not VDSO-safe): |
Andi Kleen | c5bcb56 | 2007-05-02 19:27:21 +0200 | [diff] [blame] | 40 | */ |
Andi Kleen | 6d63de8 | 2008-01-30 13:32:39 +0100 | [diff] [blame] | 41 | #ifndef CONFIG_X86_TSC |
| 42 | if (!cpu_has_tsc) |
| 43 | return 0; |
Glauber de Oliveira Costa | 4e87173 | 2008-01-30 13:31:03 +0100 | [diff] [blame] | 44 | #endif |
Ingo Molnar | cb9e35d | 2008-11-08 20:27:00 +0100 | [diff] [blame] | 45 | return (cycles_t)__native_read_tsc(); |
Andi Kleen | 6d63de8 | 2008-01-30 13:32:39 +0100 | [diff] [blame] | 46 | } |
Glauber de Oliveira Costa | 4e87173 | 2008-01-30 13:31:03 +0100 | [diff] [blame] | 47 | |
Andres Salomon | 2272b0e | 2007-03-06 01:42:05 -0800 | [diff] [blame] | 48 | extern void tsc_init(void); |
john stultz | 5a90cf2 | 2007-05-02 19:27:08 +0200 | [diff] [blame] | 49 | extern void mark_tsc_unstable(char *reason); |
Andres Salomon | 2272b0e | 2007-03-06 01:42:05 -0800 | [diff] [blame] | 50 | extern int unsynchronized_tsc(void); |
Thomas Gleixner | 2d82640 | 2009-08-20 17:06:25 +0200 | [diff] [blame] | 51 | extern int check_tsc_unstable(void); |
| 52 | extern unsigned long native_calibrate_tsc(void); |
Andres Salomon | 2272b0e | 2007-03-06 01:42:05 -0800 | [diff] [blame] | 53 | |
| 54 | /* |
| 55 | * Boot-time check whether the TSCs are synchronized across |
| 56 | * all CPUs/cores: |
| 57 | */ |
| 58 | extern void check_tsc_sync_source(int cpu); |
| 59 | extern void check_tsc_sync_target(void); |
| 60 | |
Thomas Gleixner | 80ca9c9 | 2008-01-30 13:30:18 +0100 | [diff] [blame] | 61 | extern int notsc_setup(char *); |
Suresh Siddha | cd7240c | 2010-08-19 17:03:38 -0700 | [diff] [blame] | 62 | extern void save_sched_clock_state(void); |
| 63 | extern void restore_sched_clock_state(void); |
Thomas Gleixner | d371698 | 2007-10-12 23:04:06 +0200 | [diff] [blame] | 64 | |
H. Peter Anvin | 1965aae | 2008-10-22 22:26:29 -0700 | [diff] [blame] | 65 | #endif /* _ASM_X86_TSC_H */ |