MIPS: New macro smp_mb__before_llsc.

Replace some instances of smp_llsc_mb() with a new macro,
smp_mb__before_llsc().  It is used before ll/sc (load-linked /
store-conditional) sequences that are documented as needing write
barrier semantics.

The default implementation of smp_mb__before_llsc() is just smp_llsc_mb(),
so there are no changes in semantics.

Also simplify the definitions of smp_mb(), smp_rmb(), and smp_wmb() to be
just barrier() in the non-SMP case.
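
For reference, the resulting definitions (copied from the barrier.h hunk
below); smp_rmb() and smp_wmb() follow the same pattern:

	#if defined(CONFIG_WEAK_ORDERING) && defined(CONFIG_SMP)
	#define smp_mb()	__asm__ __volatile__("sync" : : :"memory")
	#else
	#define smp_mb()	barrier()	/* compiler barrier only */
	#endif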

Signed-off-by: David Daney <ddaney@caviumnetworks.com>
To: linux-mips@linux-mips.org
Patchwork: http://patchwork.linux-mips.org/patch/851/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index dd75d67..519197e 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -137,7 +137,7 @@
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -189,7 +189,7 @@
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -249,7 +249,7 @@
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -516,7 +516,7 @@
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -568,7 +568,7 @@
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -628,7 +628,7 @@
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -788,9 +788,9 @@
  * atomic*_return operations are serializing but not the non-*_return
  * versions.
  */
-#define smp_mb__before_atomic_dec()	smp_llsc_mb()
+#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
 #define smp_mb__after_atomic_dec()	smp_llsc_mb()
-#define smp_mb__before_atomic_inc()	smp_llsc_mb()
+#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
 #define smp_mb__after_atomic_inc()	smp_llsc_mb()
 
 #include <asm-generic/atomic-long.h>
diff --git a/arch/mips/include/asm/barrier.h b/arch/mips/include/asm/barrier.h
index 91785dc..1a5a51c 100644
--- a/arch/mips/include/asm/barrier.h
+++ b/arch/mips/include/asm/barrier.h
@@ -131,23 +131,26 @@
 #endif /* !CONFIG_CPU_HAS_WB */
 
 #if defined(CONFIG_WEAK_ORDERING) && defined(CONFIG_SMP)
-#define __WEAK_ORDERING_MB	"       sync	\n"
+#define smp_mb()	__asm__ __volatile__("sync" : : :"memory")
+#define smp_rmb()	__asm__ __volatile__("sync" : : :"memory")
+#define smp_wmb()	__asm__ __volatile__("sync" : : :"memory")
 #else
-#define __WEAK_ORDERING_MB	"		\n"
+#define smp_mb()	barrier()
+#define smp_rmb()	barrier()
+#define smp_wmb()	barrier()
 #endif
+
 #if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
 #define __WEAK_LLSC_MB		"       sync	\n"
 #else
 #define __WEAK_LLSC_MB		"		\n"
 #endif
 
-#define smp_mb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-#define smp_rmb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-#define smp_wmb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-
 #define set_mb(var, value) \
 	do { var = value; smp_mb(); } while (0)
 
 #define smp_llsc_mb()	__asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
 
+#define smp_mb__before_llsc() smp_llsc_mb()
+
 #endif /* __ASM_BARRIER_H */
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index 84a3838..9255cfb 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -42,7 +42,7 @@
 /*
  * clear_bit() doesn't provide any barrier for the compiler.
  */
-#define smp_mb__before_clear_bit()	smp_llsc_mb()
+#define smp_mb__before_clear_bit()	smp_mb__before_llsc()
 #define smp_mb__after_clear_bit()	smp_llsc_mb()
 
 /*
@@ -258,7 +258,7 @@
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -395,7 +395,7 @@
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -485,7 +485,7 @@
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index 815a438..ed9aaaaf 100644
--- a/arch/mips/include/asm/cmpxchg.h
+++ b/arch/mips/include/asm/cmpxchg.h
@@ -72,14 +72,14 @@
  */
 extern void __cmpxchg_called_with_bad_pointer(void);
 
-#define __cmpxchg(ptr, old, new, barrier)				\
+#define __cmpxchg(ptr, old, new, pre_barrier, post_barrier)		\
 ({									\
 	__typeof__(ptr) __ptr = (ptr);					\
 	__typeof__(*(ptr)) __old = (old);				\
 	__typeof__(*(ptr)) __new = (new);				\
 	__typeof__(*(ptr)) __res = 0;					\
 									\
-	barrier;							\
+	pre_barrier;							\
 									\
 	switch (sizeof(*(__ptr))) {					\
 	case 4:								\
@@ -96,13 +96,13 @@
 		break;							\
 	}								\
 									\
-	barrier;							\
+	post_barrier;							\
 									\
 	__res;								\
 })
 
-#define cmpxchg(ptr, old, new)		__cmpxchg(ptr, old, new, smp_llsc_mb())
-#define cmpxchg_local(ptr, old, new)	__cmpxchg(ptr, old, new, )
+#define cmpxchg(ptr, old, new)		__cmpxchg(ptr, old, new, smp_mb__before_llsc(), smp_llsc_mb())
+#define cmpxchg_local(ptr, old, new)	__cmpxchg(ptr, old, new, , )
 
 #define cmpxchg64(ptr, o, n)						\
   ({									\
diff --git a/arch/mips/include/asm/spinlock.h b/arch/mips/include/asm/spinlock.h
index 21ef9ef..5f16696 100644
--- a/arch/mips/include/asm/spinlock.h
+++ b/arch/mips/include/asm/spinlock.h
@@ -138,7 +138,7 @@
 {
 	int tmp;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (R10000_LLSC_WAR) {
 		__asm__ __volatile__ (
@@ -305,7 +305,7 @@
 {
 	unsigned int tmp;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (R10000_LLSC_WAR) {
 		__asm__ __volatile__(
diff --git a/arch/mips/include/asm/system.h b/arch/mips/include/asm/system.h
index 83b5509..bb937cc 100644
--- a/arch/mips/include/asm/system.h
+++ b/arch/mips/include/asm/system.h
@@ -95,6 +95,8 @@
 {
 	__u32 retval;
 
+	smp_mb__before_llsc();
+
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long dummy;
 
@@ -147,6 +149,8 @@
 {
 	__u64 retval;
 
+	smp_mb__before_llsc();
+
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long dummy;