/*
 * x86 TSC related functions
 */
#ifndef _ASM_X86_TSC_H
#define _ASM_X86_TSC_H

#include <asm/processor.h>

#define NS_SCALE	10	/* 2^10, carefully chosen */
#define US_SCALE	32	/* 2^32, arbitrarily chosen */

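/*
 * Illustrative sketch (not code from this header): a power-of-two scale
 * such as NS_SCALE lets cycles be converted to nanoseconds with one
 * multiply and one shift instead of a divide on every call.  The names
 * below (scale, khz, cycles, ns) are placeholders, not kernel symbols:
 *
 *	scale = (1000000UL << NS_SCALE) / khz;	precomputed once
 *	ns    = (cycles * scale) >> NS_SCALE;	per-call fast path
 */
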
/*
 * Standard way to access the cycle counter.
 */
typedef unsigned long long cycles_t;

extern unsigned int cpu_khz;
extern unsigned int tsc_khz;

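/*
 * Example (illustrative only): tsc_khz is the TSC frequency in kHz,
 * i.e. TSC cycles per millisecond, so a cycle delta converts to
 * microseconds roughly as below (delta and us are placeholder names):
 *
 *	us = (delta * 1000) / tsc_khz;
 */
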
extern void disable_TSC(void);

static inline cycles_t get_cycles(void)
{
	unsigned long long ret = 0;

#ifndef CONFIG_X86_TSC
	if (!cpu_has_tsc)
		return 0;
#endif
	rdtscll(ret);

	return ret;
}

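/*
 * Illustrative usage (a sketch, not kernel code): measuring an elapsed
 * cycle count around some work.  On !CONFIG_X86_TSC kernels running on
 * a CPU without a TSC both reads simply return 0:
 *
 *	cycles_t t1, t2;
 *
 *	t1 = get_cycles();
 *	do_some_work();			hypothetical workload
 *	t2 = get_cycles();
 *	use t2 - t1 as the elapsed cycle count
 */
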
static __always_inline cycles_t vget_cycles(void)
{
	/*
	 * We only do VDSOs on TSC capable CPUs, so this shouldn't
	 * access boot_cpu_data (which is not VDSO-safe):
	 */
#ifndef CONFIG_X86_TSC
	if (!cpu_has_tsc)
		return 0;
#endif
	return (cycles_t)__native_read_tsc();
}

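/*
 * Note (as the comment above suggests): vget_cycles() is meant for
 * vDSO/vsyscall time code, which executes in user context and cannot
 * safely reach boot_cpu_data; ordinary kernel code should prefer
 * get_cycles().
 */
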
extern void tsc_init(void);
extern void mark_tsc_unstable(char *reason);
extern int unsynchronized_tsc(void);
extern int check_tsc_unstable(void);
extern int check_tsc_disabled(void);
extern unsigned long native_calibrate_tsc(void);

extern int tsc_clocksource_reliable;

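/*
 * Illustrative pattern (a sketch, not a specific call site): code that
 * wants to rely on the TSC as a time source typically bails out when it
 * is flagged unstable or disabled, and flags it itself when it observes
 * a problem.  The condition name below is a placeholder:
 *
 *	if (check_tsc_unstable() || check_tsc_disabled())
 *		return -ENODEV;
 *	...
 *	if (observed_tsc_problem)
 *		mark_tsc_unstable("observed TSC problem");
 */
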
/*
 * Boot-time check whether the TSCs are synchronized across
 * all CPUs/cores:
 */
extern void check_tsc_sync_source(int cpu);
extern void check_tsc_sync_target(void);

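/*
 * Typical pairing (a sketch of the SMP bring-up flow, not the exact
 * call sites): the CPU performing the bring-up calls
 * check_tsc_sync_source(new_cpu) while the freshly started CPU calls
 * check_tsc_sync_target(); the two rendezvous and compare TSC readings
 * to detect drift between the cores.
 */
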
extern int notsc_setup(char *);
extern void tsc_save_sched_clock_state(void);
extern void tsc_restore_sched_clock_state(void);

/* MSR based TSC calibration for Intel Atom SoC platforms */
unsigned long try_msr_calibrate_tsc(void);
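
/*
 * Illustrative fallback pattern (a sketch, not the exact calibration
 * code): the MSR-based fast path is tried first and a slower
 * hardware-timer based calibration is used only when it yields 0.  The
 * helper name below is hypothetical:
 *
 *	unsigned long khz = try_msr_calibrate_tsc();
 *	if (!khz)
 *		khz = calibrate_tsc_with_pit_hpet();
 */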

#endif /* _ASM_X86_TSC_H */