#ifndef _ASM_X86_CACHE_H
#define _ASM_X86_CACHE_H

#include <linux/linkage.h>

/* L1 cache line size */
#define L1_CACHE_SHIFT	(CONFIG_X86_L1_CACHE_SHIFT)
#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)

/* Place rarely written, frequently read data in its own section */
#define __read_mostly __attribute__((__section__(".data..read_mostly")))

#define INTERNODE_CACHE_SHIFT CONFIG_X86_INTERNODE_CACHE_SHIFT
#define INTERNODE_CACHE_BYTES (1 << INTERNODE_CACHE_SHIFT)

#ifdef CONFIG_X86_VSMP
#ifdef CONFIG_SMP
/*
 * On vSMP, nodes share cache lines at INTERNODE_CACHE_BYTES granularity,
 * so data aligned to avoid SMP false sharing must be padded to that size
 * and kept in the page-aligned data section.
 */
#define __cacheline_aligned_in_smp					\
	__attribute__((__aligned__(INTERNODE_CACHE_BYTES)))		\
	__page_aligned_data
#endif
#endif

#endif /* _ASM_X86_CACHE_H */
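/*
 * Usage sketch (illustrative only, not part of this header): a global
 * that is read on hot paths but written rarely can be tagged
 * __read_mostly so it lands in .data..read_mostly, away from
 * write-heavy data. The variable name below is hypothetical:
 *
 *	static unsigned long widget_limit __read_mostly = 128;
 */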