#ifndef _H8300_BARRIER_H
#define _H8300_BARRIER_H

/* Single-instruction delay / pipeline no-op. */
#define nop()  asm volatile ("nop"::)

/*
 * Force strict CPU ordering.
 * Not really required on H8: the asm bodies are empty, so these are
 * compiler-only barriers — the "memory" clobber just stops the compiler
 * from reordering or caching memory accesses across the macro.
 */
#define mb()   asm volatile ("" : : :"memory")
#define rmb()  asm volatile ("" : : :"memory")
#define wmb()  asm volatile ("" : : :"memory")

/*
 * Assign VALUE to VAR with barrier semantics: xchg() acts as a full
 * memory barrier around the store.
 * NOTE(review): xchg() is defined elsewhere (asm/cmpxchg.h) — not
 * visible in this header.
 */
#define set_mb(var, value) do { xchg(&var, value); } while (0)

/* Data-dependency read barrier: a no-op on this architecture. */
#define read_barrier_depends() do { } while (0)

#ifdef CONFIG_SMP
/* SMP build: map the smp_* barriers onto the full barriers above. */
#define smp_mb() mb()
#define smp_rmb() rmb()
#define smp_wmb() wmb()
#define smp_read_barrier_depends() read_barrier_depends()
#else
/*
 * UP build: a compiler barrier is sufficient for smp_* ordering.
 * NOTE(review): barrier() comes from linux/compiler.h — not visible here.
 */
#define smp_mb() barrier()
#define smp_rmb() barrier()
#define smp_wmb() barrier()
#define smp_read_barrier_depends() do { } while (0)
#endif

#endif /* _H8300_BARRIER_H */