[MIPS] Make support for weakly ordered LL/SC a config option.

None of the weakly ordered processors supported in-tree needs this, but it seems
like this could change ...

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
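
For reference, a minimal sketch (illustrative only, not part of this patch) of
what the new smp_llsc_mb() barrier boils down to once the __WEAK_LLSC_MB
definition from barrier.h is folded in: with CONFIG_WEAK_REORDERING_BEYOND_LLSC
and CONFIG_SMP both set it emits a real SYNC, otherwise it degenerates to a
pure compiler barrier.

	#if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
	/* CPUs that reorder accesses even across completed LL/SC sequences:
	 * a real SYNC is required after the LL/SC loop. */
	#define smp_llsc_mb()	__asm__ __volatile__("	sync	\n" : : : "memory")
	#else
	/* All in-tree weakly ordered cores: compiler barrier only, no SYNC. */
	#define smp_llsc_mb()	__asm__ __volatile__("		\n" : : : "memory")
	#endif

A platform whose CPUs need the SYNC would be expected to select
WEAK_REORDERING_BEYOND_LLSC from its arch/mips/Kconfig entry; the declaration
of that option itself is assumed here and not shown in this diff.
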
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index 1b60624..7d80037 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -138,7 +138,7 @@
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -181,7 +181,7 @@
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -190,7 +190,7 @@
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -233,7 +233,7 @@
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -250,7 +250,7 @@
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -302,7 +302,7 @@
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -519,7 +519,7 @@
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -562,7 +562,7 @@
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -571,7 +571,7 @@
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -614,7 +614,7 @@
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -631,7 +631,7 @@
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -683,7 +683,7 @@
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -791,10 +791,11 @@
  * atomic*_return operations are serializing but not the non-*_return
  * versions.
  */
-#define smp_mb__before_atomic_dec()	smp_mb()
-#define smp_mb__after_atomic_dec()	smp_mb()
-#define smp_mb__before_atomic_inc()	smp_mb()
-#define smp_mb__after_atomic_inc()	smp_mb()
+#define smp_mb__before_atomic_dec()	smp_llsc_mb()
+#define smp_mb__after_atomic_dec()	smp_llsc_mb()
+#define smp_mb__before_atomic_inc()	smp_llsc_mb()
+#define smp_mb__after_atomic_inc()	smp_llsc_mb()
 
 #include <asm-generic/atomic.h>
+
 #endif /* _ASM_ATOMIC_H */
diff --git a/include/asm-mips/barrier.h b/include/asm-mips/barrier.h
index ed82631..9d8cfbb 100644
--- a/include/asm-mips/barrier.h
+++ b/include/asm-mips/barrier.h
@@ -121,6 +121,11 @@
 #else
 #define __WEAK_ORDERING_MB	"		\n"
 #endif
+#if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
+#define __WEAK_LLSC_MB		"       sync	\n"
+#else
+#define __WEAK_LLSC_MB		"		\n"
+#endif
 
 #define smp_mb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
 #define smp_rmb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
@@ -129,4 +134,8 @@
 #define set_mb(var, value) \
 	do { var = value; smp_mb(); } while (0)
 
+#define smp_llsc_mb()	__asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
+#define smp_llsc_rmb()	__asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
+#define smp_llsc_wmb()	__asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
+
 #endif /* __ASM_BARRIER_H */
diff --git a/include/asm-mips/bitops.h b/include/asm-mips/bitops.h
index d9e81af..148bc79 100644
--- a/include/asm-mips/bitops.h
+++ b/include/asm-mips/bitops.h
@@ -38,8 +38,8 @@
 /*
  * clear_bit() doesn't provide any barrier for the compiler.
  */
-#define smp_mb__before_clear_bit()	smp_mb()
-#define smp_mb__after_clear_bit()	smp_mb()
+#define smp_mb__before_clear_bit()	smp_llsc_mb()
+#define smp_mb__after_clear_bit()	smp_llsc_mb()
 
 /*
  * set_bit - Atomically set a bit in memory
@@ -289,7 +289,7 @@
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return res != 0;
 }
@@ -377,7 +377,7 @@
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return res != 0;
 }
@@ -445,7 +445,7 @@
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return res != 0;
 }
diff --git a/include/asm-mips/futex.h b/include/asm-mips/futex.h
index 47e5679..b623882 100644
--- a/include/asm-mips/futex.h
+++ b/include/asm-mips/futex.h
@@ -29,7 +29,7 @@
 		"	.set	mips3				\n"	\
 		"2:	sc	$1, %2				\n"	\
 		"	beqzl	$1, 1b				\n"	\
-		__WEAK_ORDERING_MB					\
+		__WEAK_LLSC_MB						\
 		"3:						\n"	\
 		"	.set	pop				\n"	\
 		"	.set	mips0				\n"	\
@@ -55,7 +55,7 @@
 		"	.set	mips3				\n"	\
 		"2:	sc	$1, %2				\n"	\
 		"	beqz	$1, 1b				\n"	\
-		__WEAK_ORDERING_MB					\
+		__WEAK_LLSC_MB						\
 		"3:						\n"	\
 		"	.set	pop				\n"	\
 		"	.set	mips0				\n"	\
@@ -152,7 +152,7 @@
 		"	.set	mips3					\n"
 		"2:	sc	$1, %1					\n"
 		"	beqzl	$1, 1b					\n"
-		__WEAK_ORDERING_MB
+		__WEAK_LLSC_MB
 		"3:							\n"
 		"	.set	pop					\n"
 		"	.section .fixup,\"ax\"				\n"
@@ -179,7 +179,7 @@
 		"	.set	mips3					\n"
 		"2:	sc	$1, %1					\n"
 		"	beqz	$1, 1b					\n"
-		__WEAK_ORDERING_MB
+		__WEAK_LLSC_MB
 		"3:							\n"
 		"	.set	pop					\n"
 		"	.section .fixup,\"ax\"				\n"
diff --git a/include/asm-mips/spinlock.h b/include/asm-mips/spinlock.h
index 35e431c..bb89701 100644
--- a/include/asm-mips/spinlock.h
+++ b/include/asm-mips/spinlock.h
@@ -67,7 +67,7 @@
 		: "memory");
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 }
 
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
@@ -118,7 +118,7 @@
 		: "memory");
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return res == 0;
 }
@@ -183,7 +183,7 @@
 		: "memory");
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 }
 
 /* Note the use of sub, not subu which will make the kernel die with an
@@ -193,7 +193,7 @@
 {
 	unsigned int tmp;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (R10000_LLSC_WAR) {
 		__asm__ __volatile__(
@@ -262,7 +262,7 @@
 		: "memory");
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 }
 
 static inline void __raw_write_unlock(raw_rwlock_t *rw)
@@ -293,7 +293,7 @@
 		"	.set	reorder					\n"
 		"	beqzl	%1, 1b					\n"
 		"	 nop						\n"
-		__WEAK_ORDERING_MB
+		__WEAK_LLSC_MB
 		"	li	%2, 1					\n"
 		"2:							\n"
 		: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
@@ -310,7 +310,7 @@
 		"	beqz	%1, 1b					\n"
 		"	 nop						\n"
 		"	.set	reorder					\n"
-		__WEAK_ORDERING_MB
+		__WEAK_LLSC_MB
 		"	li	%2, 1					\n"
 		"2:							\n"
 		: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
@@ -336,7 +336,7 @@
 		"	sc	%1, %0					\n"
 		"	beqzl	%1, 1b					\n"
 		"	 nop						\n"
-		__WEAK_ORDERING_MB
+		__WEAK_LLSC_MB
 		"	li	%2, 1					\n"
 		"	.set	reorder					\n"
 		"2:							\n"
@@ -354,7 +354,7 @@
 		"	beqz	%1, 3f					\n"
 		"	 li	%2, 1					\n"
 		"2:							\n"
-		__WEAK_ORDERING_MB
+		__WEAK_LLSC_MB
 		"	.subsection 2					\n"
 		"3:	b	1b					\n"
 		"	 li	%2, 0					\n"
diff --git a/include/asm-mips/system.h b/include/asm-mips/system.h
index 7633916..eba2e3d 100644
--- a/include/asm-mips/system.h
+++ b/include/asm-mips/system.h
@@ -117,7 +117,7 @@
 		raw_local_irq_restore(flags);	/* implies memory barrier  */
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return retval;
 }
@@ -165,7 +165,7 @@
 		raw_local_irq_restore(flags);	/* implies memory barrier  */
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return retval;
 }
@@ -246,7 +246,7 @@
 		raw_local_irq_restore(flags);	/* implies memory barrier  */
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return retval;
 }
@@ -352,7 +352,7 @@
 		raw_local_irq_restore(flags);	/* implies memory barrier  */
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return retval;
 }