#ifndef ASM_EDAC_H
#define ASM_EDAC_H

/* ECC atomic, DMA, SMP and interrupt safe scrub function */

static inline void atomic_scrub(void *va, u32 size)
{
	unsigned long *virt_addr = va;
	unsigned long temp;
	u32 i;

	for (i = 0; i < size / sizeof(unsigned long); i++) {
		/*
		 * Very carefully read and write to memory atomically
		 * so we are interrupt, DMA and SMP safe.
		 *
		 * Intel: asm("lock; addl $0, %0"::"m"(*virt_addr));
		 */
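		/*
		 * On MIPS this is done with a load-linked/store-conditional
		 * loop: ll loads the word and opens a linked access, addu
		 * with register $0 keeps the loaded value unchanged, sc
		 * stores it back only if nothing else touched the location,
		 * and beqz retries until the store succeeds.  Rewriting the
		 * word in place lets the memory controller write corrected
		 * data back over a correctable ECC error.
		 */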
		__asm__ __volatile__ (
		"	.set	mips2					\n"
		"1:	ll	%0, %1		# atomic_scrub		\n"
		"	addu	%0, $0					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*virt_addr)
		: "m" (*virt_addr));
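		/*
		 * Constraint notes: temp is "=&r" (earlyclobber) because ll
		 * writes it before the memory input is last used by sc; the
		 * "=m"/"m" pair on *virt_addr tells the compiler the word is
		 * both read and written by the asm.
		 */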

		virt_addr++;
	}
}

#endif
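/*
 * Usage sketch (illustrative, not taken from this header): the EDAC core
 * scrubs a block that reported a correctable error by mapping its page and
 * rewriting it in place, roughly:
 *
 *	void *virt = kmap_atomic(pg, KM_BOUNCE_READ);
 *	atomic_scrub(virt + offset, size);
 *	kunmap_atomic(virt, KM_BOUNCE_READ);
 *
 * Mapping calls differ across kernel versions, and atomic_scrub() itself
 * rounds size down to whole unsigned longs.
 */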