#ifndef ASM_X86__CACHE_H
#define ASM_X86__CACHE_H

/* L1 cache line size */
#define L1_CACHE_SHIFT	(CONFIG_X86_L1_CACHE_SHIFT)
#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)

/* Group rarely-written data so it does not share cache lines with write-hot data. */
#define __read_mostly __attribute__((__section__(".data.read_mostly")))

#ifdef CONFIG_X86_VSMP
/* vSMP Internode cacheline shift */
#define INTERNODE_CACHE_SHIFT (12)
#ifdef CONFIG_SMP
#define __cacheline_aligned_in_smp \
	__attribute__((__aligned__(1 << (INTERNODE_CACHE_SHIFT)))) \
	__attribute__((__section__(".data.page_aligned")))
#endif
#endif

#endif /* ASM_X86__CACHE_H */
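
/*
 * Usage sketch (illustrative only; the identifiers below are made up for
 * this example and are not part of the header): __read_mostly marks data
 * that is written rarely so the linker groups it away from write-hot data,
 * while __cacheline_aligned_in_smp keeps an object on its own cache line
 * (the 4 KiB internode line on vSMP SMP builds, per the definitions above)
 * to avoid false sharing between CPUs:
 *
 *	static int sample_debug_level __read_mostly;
 *
 *	struct sample_counters {
 *		unsigned long events;
 *	};
 *	static struct sample_counters sample_counters __cacheline_aligned_in_smp;
 */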