#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__
#include <asm/outercache.h>

#define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");

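/*
 * SEV/WFE/WFI hint instructions only exist from ARMv6K and ARMv7
 * onwards, so they are only defined when the build targets such cores.
 */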
#if __LINUX_ARM_ARCH__ >= 7 || \
	(__LINUX_ARM_ARCH__ == 6 && defined(CONFIG_CPU_32v6K))
#define sev()	__asm__ __volatile__ ("sev" : : : "memory")
#define wfe()	__asm__ __volatile__ ("wfe" : : : "memory")
#define wfi()	__asm__ __volatile__ ("wfi" : : : "memory")
#endif

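/*
 * Instruction (isb), data synchronisation (dsb) and data memory (dmb)
 * barriers.  ARMv7 provides dedicated instructions that take a
 * shareability/access-type option (sy, st, ish, ishst, osh, oshst);
 * ARMv6 and XScale3 use the equivalent CP15 c7 operations; cores that
 * lack a particular barrier fall back to a plain compiler barrier.
 */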
#if __LINUX_ARM_ARCH__ >= 7
#define isb(option) __asm__ __volatile__ ("isb " #option : : : "memory")
#define dsb(option) __asm__ __volatile__ ("dsb " #option : : : "memory")
#define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")
#elif defined(CONFIG_CPU_XSC3) || __LINUX_ARM_ARCH__ == 6
#define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
				    : : "r" (0) : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
				    : : "r" (0) : "memory")
#elif defined(CONFIG_CPU_FA526)
#define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
				    : : "r" (0) : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("" : : : "memory")
#else
#define isb(x) __asm__ __volatile__ ("" : : : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("" : : : "memory")
#endif

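/*
 * Mandatory barriers.  Platforms that select CONFIG_ARCH_HAS_BARRIERS
 * supply their own definitions via <mach/barriers.h>.  Otherwise, when
 * DMA memory may be bufferable or the kernel is SMP, mb()/rmb()/wmb()
 * expand to full DSBs (plus an outer-cache sync for mb()/wmb()) so that
 * they also order accesses against DMA and device memory, while
 * dma_rmb()/dma_wmb() only order coherent DMA buffers against CPU
 * accesses and can use lighter outer-shareable DMBs.  On other
 * configurations a compiler barrier is sufficient.
 */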
#ifdef CONFIG_ARCH_HAS_BARRIERS
#include <mach/barriers.h>
#elif defined(CONFIG_ARM_DMA_MEM_BUFFERABLE) || defined(CONFIG_SMP)
#define mb()		do { dsb(); outer_sync(); } while (0)
#define rmb()		dsb()
#define wmb()		do { dsb(st); outer_sync(); } while (0)
#define dma_rmb()	dmb(osh)
#define dma_wmb()	dmb(oshst)
#else
#define mb()		barrier()
#define rmb()		barrier()
#define wmb()		barrier()
#define dma_rmb()	barrier()
#define dma_wmb()	barrier()
#endif

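/*
 * SMP barriers only order accesses to normal memory between CPUs, so
 * inner-shareable DMBs are enough; on UP builds they reduce to
 * compiler barriers.
 */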
#ifndef CONFIG_SMP
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#else
#define smp_mb()	dmb(ish)
#define smp_rmb()	smp_mb()
#define smp_wmb()	dmb(ishst)
#endif

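/*
 * 32-bit ARM has no load-acquire/store-release instructions (those
 * arrived with ARMv8), so acquire and release semantics are built from
 * a plain access plus a full smp_mb().
 */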
65
Peter Zijlstra47933ad2013-11-06 14:57:36 +010066#define smp_store_release(p, v) \
67do { \
68 compiletime_assert_atomic_type(*p); \
69 smp_mb(); \
70 ACCESS_ONCE(*p) = (v); \
71} while (0)
72
73#define smp_load_acquire(p) \
74({ \
75 typeof(*p) ___p1 = ACCESS_ONCE(*p); \
76 compiletime_assert_atomic_type(*p); \
77 smp_mb(); \
78 ___p1; \
79})
80
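/*
 * ARM respects address dependencies between loads (unlike Alpha), so
 * the dependency barriers are no-ops.
 */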
#define read_barrier_depends()		do { } while(0)
#define smp_read_barrier_depends()	do { } while(0)

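/* Store a value and then order it against all subsequent accesses. */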
#define smp_store_mb(var, value)	do { WRITE_ONCE(var, value); smp_mb(); } while (0)

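/*
 * Non-value-returning atomic RMW operations are unordered on ARM, so a
 * full barrier is needed on either side of them.
 */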
#define smp_mb__before_atomic()	smp_mb()
#define smp_mb__after_atomic()	smp_mb()

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_BARRIER_H */