x86: fix asm memory constraints in local_64.h

Use the single "+m" read-write constraint rather than a separate "=m" output
constraint paired with a matching "m" input constraint; the operands are
read-modify-write, so "+m" expresses the same semantics more concisely.

Signed-off-by: Harvey Harrison <harvey.harrison@gmail.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
diff --git a/include/asm-x86/local_64.h b/include/asm-x86/local_64.h
index 92330f8..50e99ed 100644
--- a/include/asm-x86/local_64.h
+++ b/include/asm-x86/local_64.h
@@ -5,32 +5,30 @@
 {
 	__asm__ __volatile__(
 		"incq %0"
-		:"=m" (l->a.counter)
-		:"m" (l->a.counter));
+		:"+m" (l->a.counter));
 }
 
 static inline void local_dec(local_t *l)
 {
 	__asm__ __volatile__(
 		"decq %0"
-		:"=m" (l->a.counter)
-		:"m" (l->a.counter));
+		:"+m" (l->a.counter));
 }
 
 static inline void local_add(long i, local_t *l)
 {
 	__asm__ __volatile__(
 		"addq %1,%0"
-		:"=m" (l->a.counter)
-		:"ir" (i), "m" (l->a.counter));
+		:"+m" (l->a.counter)
+		:"ir" (i));
 }
 
 static inline void local_sub(long i, local_t *l)
 {
 	__asm__ __volatile__(
 		"subq %1,%0"
-		:"=m" (l->a.counter)
-		:"ir" (i), "m" (l->a.counter));
+		:"+m" (l->a.counter)
+		:"ir" (i));
 }
 
 /**
@@ -48,8 +46,8 @@
 
 	__asm__ __volatile__(
 		"subq %2,%0; sete %1"
-		:"=m" (l->a.counter), "=qm" (c)
-		:"ir" (i), "m" (l->a.counter) : "memory");
+		:"+m" (l->a.counter), "=qm" (c)
+		:"ir" (i) : "memory");
 	return c;
 }
 
@@ -67,8 +65,8 @@
 
 	__asm__ __volatile__(
 		"decq %0; sete %1"
-		:"=m" (l->a.counter), "=qm" (c)
-		:"m" (l->a.counter) : "memory");
+		:"+m" (l->a.counter), "=qm" (c)
+		: : "memory");
 	return c != 0;
 }
 
@@ -86,8 +84,8 @@
 
 	__asm__ __volatile__(
 		"incq %0; sete %1"
-		:"=m" (l->a.counter), "=qm" (c)
-		:"m" (l->a.counter) : "memory");
+		:"+m" (l->a.counter), "=qm" (c)
+		: : "memory");
 	return c != 0;
 }
 
@@ -106,8 +104,8 @@
 
 	__asm__ __volatile__(
 		"addq %2,%0; sets %1"
-		:"=m" (l->a.counter), "=qm" (c)
-		:"ir" (i), "m" (l->a.counter) : "memory");
+		:"+m" (l->a.counter), "=qm" (c)
+		:"ir" (i) : "memory");
 	return c;
 }