#ifndef _ASM_X86_EDAC_H
#define _ASM_X86_EDAC_H

/* ECC atomic, DMA, SMP and interrupt safe scrub function */

static __inline__ void atomic_scrub(void *va, u32 size)
{
	u32 i, *virt_addr = va;

	/*
	 * Very carefully read and write to memory atomically so we
	 * are interrupt, DMA and SMP safe.
	 */
	for (i = 0; i < size / 4; i++, virt_addr++)
		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
}

#endif
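
/*
 * Illustrative usage sketch -- not part of the header above.  An EDAC
 * driver would call atomic_scrub() on a kernel-virtual mapping of the
 * region that reported a correctable ECC error.  The locked "addl $0"
 * performs an atomic read-modify-write of each 32-bit word, so the
 * memory controller writes the ECC-corrected data back to DRAM.  The
 * buffer and function names below are assumptions made up for the
 * example; only atomic_scrub() itself comes from this header.
 */
#include <linux/types.h>

static u32 scrub_demo_buf[1024];	/* hypothetical 4 KiB region to scrub */

static void scrub_demo(void)
{
	/* Rewrite every 32-bit word of the buffer atomically. */
	atomic_scrub(scrub_demo_buf, sizeof(scrub_demo_buf));
}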