#ifndef _ASM_X86_EDAC_H
#define _ASM_X86_EDAC_H

/* ECC atomic, DMA, SMP and interrupt safe scrub function */

static inline void atomic_scrub(void *va, u32 size)
{
	u32 i, *virt_addr = va;

	/*
	 * Very carefully read and write to memory atomically so we
	 * are interrupt, DMA and SMP safe.
	 *
	 * The locked "addl $0" is a no-op on the data, but it forces
	 * an atomic read-modify-write bus cycle: the CPU re-reads each
	 * (hardware-corrected) word and writes it back, committing the
	 * ECC correction to memory.
	 */
	for (i = 0; i < size / 4; i++, virt_addr++)
		asm volatile("lock; addl $0, %0"::"m" (*virt_addr));
}
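
/*
 * Usage sketch (illustrative only; the call below is an assumed
 * caller, not an API defined in this header): an EDAC driver holding
 * a kernel virtual mapping of a page that reported a corrected error
 * could rewrite the whole page so the corrected data is written back
 * to DRAM:
 *
 *	atomic_scrub(virt_addr, PAGE_SIZE);
 *
 * Note that size is consumed in whole 32-bit words; any trailing
 * partial word is not scrubbed.
 */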

#endif /* _ASM_X86_EDAC_H */