/*
 * Generic barrier definitions, originally based on MN10300 definitions.
 *
 * It should be possible to use these on really simple architectures,
 * but they serve more as a starting point for new ports.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#ifndef __ASM_GENERIC_BARRIER_H
#define __ASM_GENERIC_BARRIER_H

#ifndef __ASSEMBLY__

#include <linux/compiler.h>

#ifndef nop
#define nop()	asm volatile ("nop")
#endif

/*
 * Force strict CPU ordering. And yes, this is required on UP too when we're
 * talking to devices.
 *
 * Fall back to compiler barriers if nothing better is provided.
 */

#ifndef mb
#define mb()	barrier()
#endif

#ifndef rmb
#define rmb()	mb()
#endif

#ifndef wmb
#define wmb()	mb()
#endif
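
/*
 * Illustrative sketch (not from this file): the classic reason wmb() is
 * needed even on UP is device I/O.  A driver must make its descriptor
 * stores visible before ringing the device doorbell; "desc", "buf_dma"
 * and "doorbell" below are hypothetical driver state.
 *
 *	desc->addr = buf_dma;		// fill in the descriptor
 *	desc->len  = len;
 *	wmb();				// descriptor visible before doorbell
 *	writel(1, doorbell);		// tell the device to start
 */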

#ifndef dma_rmb
#define dma_rmb()	rmb()
#endif

#ifndef dma_wmb
#define dma_wmb()	wmb()
#endif
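
/*
 * Illustrative sketch: dma_rmb()/dma_wmb() order accesses to coherent
 * DMA memory, where they can be cheaper than a full mb().  A
 * hypothetical NIC driver polling a descriptor shared with the device
 * might do:
 *
 *	if (desc->status & DESC_OWNED_BY_CPU) {
 *		dma_rmb();		// read status before payload
 *		len = desc->len;	// now safe to read the rest
 *	}
 */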

#ifndef read_barrier_depends
#define read_barrier_depends()	do { } while (0)
#endif
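
/*
 * Illustrative sketch: read_barrier_depends() orders a pointer load
 * against later loads through that pointer.  Only DEC Alpha needs a
 * real instruction here; on everything else dependent loads are
 * ordered by hardware, hence the empty fallback above.
 *
 *	p = ACCESS_ONCE(head);		// load the pointer
 *	read_barrier_depends();		// order against the deref below
 *	val = p->data;			// dependent load
 */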

#ifdef CONFIG_SMP

#ifndef smp_mb
#define smp_mb()	mb()
#endif

#ifndef smp_rmb
#define smp_rmb()	rmb()
#endif

#ifndef smp_wmb
#define smp_wmb()	wmb()
#endif

#ifndef smp_read_barrier_depends
#define smp_read_barrier_depends()	read_barrier_depends()
#endif

#else	/* !CONFIG_SMP */

#ifndef smp_mb
#define smp_mb()	barrier()
#endif

#ifndef smp_rmb
#define smp_rmb()	barrier()
#endif

#ifndef smp_wmb
#define smp_wmb()	barrier()
#endif

#ifndef smp_read_barrier_depends
#define smp_read_barrier_depends()	do { } while (0)
#endif

#endif	/* CONFIG_SMP */
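
/*
 * On UP kernels the smp_*() variants compile down to barrier(): there
 * is no other CPU to race with, so only the compiler needs taming.
 *
 * Illustrative sketch of the message-passing pattern these barriers
 * exist for ("data" and "ready" are hypothetical shared variables):
 *
 *	CPU 0				CPU 1
 *	data = 42;			while (!ACCESS_ONCE(ready))
 *	smp_wmb();				cpu_relax();
 *	ACCESS_ONCE(ready) = 1;		smp_rmb();
 *					BUG_ON(data != 42);
 */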

#ifndef set_mb
#define set_mb(var, value)	do { (var) = (value); mb(); } while (0)
#endif
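
/*
 * Illustrative sketch: set_mb() is the store-then-full-barrier idiom
 * used, for example, when putting a task to sleep, so that the state
 * store is ordered against the subsequent condition re-check
 * ("condition" is a hypothetical wakeup condition):
 *
 *	set_mb(current->state, TASK_INTERRUPTIBLE);
 *	if (!condition)
 *		schedule();
 */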

#ifndef smp_mb__before_atomic
#define smp_mb__before_atomic()	smp_mb()
#endif

#ifndef smp_mb__after_atomic
#define smp_mb__after_atomic()	smp_mb()
#endif
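
/*
 * Illustrative sketch: atomic RMW operations that return no value
 * (e.g. atomic_dec()) imply no ordering, so callers that need it
 * bracket the op explicitly ("obj" is a hypothetical refcounted
 * object):
 *
 *	obj->dead = 1;
 *	smp_mb__before_atomic();	// order the store above...
 *	atomic_dec(&obj->refs);		// ...before the atomic decrement
 */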

#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	ACCESS_ONCE(*p) = (v);						\
} while (0)

#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = ACCESS_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	___p1;								\
})
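
/*
 * Illustrative sketch: publish/consume with the pair above ("msg" and
 * "flag" are hypothetical shared variables).  Once the consumer sees
 * flag == 1 it is guaranteed to observe msg == 1:
 *
 *	CPU 0				CPU 1
 *	msg = 1;			if (smp_load_acquire(&flag))
 *	smp_store_release(&flag, 1);		BUG_ON(msg != 1);
 *
 * Note that these generic versions are stronger than strictly
 * required: a full smp_mb() stands in for one-way acquire/release
 * ordering.
 */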

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_GENERIC_BARRIER_H */