blob: 3ff5642d9788490167a87629fe6774297adbd67d [file] [log] [blame]
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

/* Architectural no-op: a harmless register move, annotated "@ nop". */
#define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");

/*
 * SEV/WFE/WFI (send-event, wait-for-event, wait-for-interrupt) exist on
 * ARMv7, and on ARMv6 only in the v6K variant (CONFIG_CPU_32v6K).
 */
#if __LINUX_ARM_ARCH__ >= 7 ||		\
	(__LINUX_ARM_ARCH__ == 6 && defined(CONFIG_CPU_32v6K))
#define sev()	__asm__ __volatile__ ("sev" : : : "memory")
#define wfe()	__asm__ __volatile__ ("wfe" : : : "memory")
#define wfi()	__asm__ __volatile__ ("wfi" : : : "memory")
#endif

/*
 * isb/dsb/dmb: instruction, data-synchronization and data-memory barriers.
 *
 * ARMv7+ has dedicated ISB/DSB/DMB instructions which take an optional
 * shareability/access-type argument ("option" is pasted into the mnemonic,
 * e.g. dmb(ish) -> "dmb ish").  On XScale3/ARMv6 the equivalent operations
 * are CP15 c7 writes (the macro argument is accepted but ignored).  FA526
 * has CP15 isb/dsb but no dmb; older cores get a dsb-only fallback with
 * compiler-barrier isb/dmb.  All variants clobber "memory" so the compiler
 * cannot reorder memory accesses across them.
 */
#if __LINUX_ARM_ARCH__ >= 7
#define isb(option) __asm__ __volatile__ ("isb " #option : : : "memory")
#define dsb(option) __asm__ __volatile__ ("dsb " #option : : : "memory")
#define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")
#elif defined(CONFIG_CPU_XSC3) || __LINUX_ARM_ARCH__ == 6
#define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
				    : : "r" (0) : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
				    : : "r" (0) : "memory")
#elif defined(CONFIG_CPU_FA526)
#define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
				    : : "r" (0) : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("" : : : "memory")
#else
#define isb(x) __asm__ __volatile__ ("" : : : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("" : : : "memory")
#endif

/*
 * Some SoCs need an extra, platform-provided step (arm_heavy_mb(), which
 * may dispatch through the soc_mb function pointer) on top of dsb for a
 * full barrier; otherwise __arm_heavy_mb() is just dsb with the same
 * optional argument.
 */
#ifdef CONFIG_ARM_HEAVY_MB
extern void (*soc_mb)(void);
extern void arm_heavy_mb(void);
#define __arm_heavy_mb(x...) do { dsb(x); arm_heavy_mb(); } while (0)
#else
#define __arm_heavy_mb(x...) dsb(x)
#endif

/*
 * Mandatory barriers (mb/rmb/wmb) and DMA barriers: real hardware
 * barriers when the machine provides its own (<mach/barriers.h>), or when
 * DMA memory may be bufferable or the kernel is SMP; otherwise a plain
 * compiler barrier suffices.  wmb() uses the store-only ("st") form,
 * dma_* use outer-shareable ("osh"/"oshst") dmb variants.
 */
#ifdef CONFIG_ARCH_HAS_BARRIERS
#include <mach/barriers.h>
#elif defined(CONFIG_ARM_DMA_MEM_BUFFERABLE) || defined(CONFIG_SMP)
#define mb()		__arm_heavy_mb()
#define rmb()		dsb()
#define wmb()		__arm_heavy_mb(st)
#define dma_rmb()	dmb(osh)
#define dma_wmb()	dmb(oshst)
#else
#define mb()		barrier()
#define rmb()		barrier()
#define wmb()		barrier()
#define dma_rmb()	barrier()
#define dma_wmb()	barrier()
#endif

/*
 * SMP barriers: compiler-only on UP kernels; inner-shareable dmb
 * ("ish"/"ishst") on SMP.
 */
#ifndef CONFIG_SMP
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#else
#define smp_mb()	dmb(ish)
#define smp_rmb()	smp_mb()
#define smp_wmb()	dmb(ishst)
#endif

/* Store-release: full barrier before the store; *p must be a type
 * the CPU can store atomically (checked at compile time). */
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)

/* Load-acquire: load first, then a full barrier; returns the value read. */
#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	___p1;								\
})

/* ARM does not reorder dependent loads, so these are no-ops. */
#define read_barrier_depends()		do { } while(0)
#define smp_read_barrier_depends()	do { } while(0)

/* Store then full barrier (the historical set_mb()/xchg-style ordering). */
#define smp_store_mb(var, value)	do { WRITE_ONCE(var, value); smp_mb(); } while (0)

/* Atomic RMW ops are not ordering fences on ARM; pair them with smp_mb(). */
#define smp_mb__before_atomic()	smp_mb()
#define smp_mb__after_atomic()	smp_mb()

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_BARRIER_H */